generate-dataset-canny.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author     : Hongzhuo Liang
# E-mail     : liang@informatik.uni-hamburg.de
# Description:
# Date       : 20/05/2018 2:45 PM
# File Name  : generate-dataset-canny.py
import numpy as np
import sys
import pickle
from dexnet.grasping.quality import PointGraspMetrics3D
from dexnet.grasping import GaussianGraspSampler, AntipodalGraspSampler, UniformGraspSampler, GpgGraspSampler
from dexnet.grasping import RobotGripper, GraspableObject3D, GraspQualityConfigFactory, PointGraspSampler
import dexnet
from autolab_core import YamlConfig
from meshpy.obj_file import ObjFile
from meshpy.sdf_file import SdfFile
import os
import multiprocessing
import matplotlib.pyplot as plt

plt.switch_backend('agg')  # convenient for running on a remote computer


def get_file_name(file_dir_):
    file_list = []
    for root, dirs, files in os.walk(file_dir_):
        if root.count('/') == file_dir_.count('/') + 1:
            file_list.append(root)
    file_list.sort()
    return file_list


def do_job(i):
    object_name = file_list_all[i][len(home_dir) + 35:]
    good_grasp = multiprocessing.Manager().list()
    p_set = [multiprocessing.Process(target=worker, args=(i, 100, 20, good_grasp))
             for _ in range(50)]  # grasp_amount per friction: 20*40
    [p.start() for p in p_set]
    [p.join() for p in p_set]
    good_grasp = list(good_grasp)
    good_grasp_file_name = "./generated_grasps/{}_{}_{}".format(filename_prefix, str(object_name),
                                                                str(len(good_grasp)))
    with open(good_grasp_file_name + '.pickle', 'wb') as f:
        pickle.dump(good_grasp, f)

    tmp = []
    for grasp in good_grasp:
        grasp_config = grasp[0].configuration
        score_friction = grasp[1]
        score_canny = grasp[2]
        tmp.append(np.concatenate([grasp_config, [score_friction, score_canny]]))
    np.save(good_grasp_file_name + '.npy', np.array(tmp))
    print("finished job ", object_name)


def worker(i, sample_nums, grasp_amount, good_grasp):
    object_name = file_list_all[i][len(home_dir) + 35:]
    print('a worker of task {} start'.format(object_name))

    yaml_config = YamlConfig(home_dir + "/container_catkin_ws/src/grasp-pointnet/dex-net/test/config.yaml")
    gripper_name = 'robotiq_85'
    gripper = RobotGripper.load(gripper_name,
                                home_dir + "/container_catkin_ws/src/grasp-pointnet/dex-net/data/grippers")
    grasp_sample_method = "antipodal"
    if grasp_sample_method == "uniform":
        ags = UniformGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gaussian":
        ags = GaussianGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "antipodal":
        ags = AntipodalGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gpg":
        ags = GpgGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "point":
        ags = PointGraspSampler(gripper, yaml_config)
    else:
        raise NameError("Can't support this sampler")
    print("Log: do job", i)
    if os.path.exists(str(file_list_all[i]) + "/google_512k/nontextured.obj"):
        of = ObjFile(str(file_list_all[i]) + "/google_512k/nontextured.obj")
        sf = SdfFile(str(file_list_all[i]) + "/google_512k/nontextured.sdf")
    else:
        print("can't find any obj or sdf file!")
        raise NameError("can't find any obj or sdf file!")
    mesh = of.read()
    sdf = sf.read()
    obj = GraspableObject3D(sdf, mesh)
    print("Log: opened object", i + 1, object_name)

    force_closure_quality_config = {}
    canny_quality_config = {}
    fc_list_sub1 = np.arange(2.0, 0.75, -0.4)
    fc_list_sub2 = np.arange(0.5, 0.36, -0.05)
    fc_list = np.concatenate([fc_list_sub1, fc_list_sub2])
    for value_fc in fc_list:
        value_fc = round(value_fc, 2)
        yaml_config['metrics']['force_closure']['friction_coef'] = value_fc
        yaml_config['metrics']['robust_ferrari_canny']['friction_coef'] = value_fc

        force_closure_quality_config[value_fc] = GraspQualityConfigFactory.create_config(
            yaml_config['metrics']['force_closure'])
        canny_quality_config[value_fc] = GraspQualityConfigFactory.create_config(
            yaml_config['metrics']['robust_ferrari_canny'])

    good_count_perfect = np.zeros(len(fc_list))
    count = 0
    minimum_grasp_per_fc = grasp_amount
    while np.sum(good_count_perfect < minimum_grasp_per_fc) != 0:
        grasps = ags.generate_grasps(obj, target_num_grasps=sample_nums, grasp_gen_mult=10,
                                     vis=False, random_approach_angle=True)
        count += len(grasps)
        for j in grasps:
            tmp, is_force_closure = False, False
            for ind_, value_fc in enumerate(fc_list):
                value_fc = round(value_fc, 2)
                tmp = is_force_closure
                is_force_closure = PointGraspMetrics3D.grasp_quality(j, obj,
                                                                     force_closure_quality_config[value_fc],
                                                                     vis=False)
                if tmp and not is_force_closure:
                    if good_count_perfect[ind_ - 1] < minimum_grasp_per_fc:
                        canny_quality = PointGraspMetrics3D.grasp_quality(j, obj,
                                                                          canny_quality_config[
                                                                              round(fc_list[ind_ - 1], 2)],
                                                                          vis=False)
                        good_grasp.append((j, round(fc_list[ind_ - 1], 2), canny_quality))
                        good_count_perfect[ind_ - 1] += 1
                    break
                elif is_force_closure and value_fc == fc_list[-1]:
                    if good_count_perfect[ind_] < minimum_grasp_per_fc:
                        canny_quality = PointGraspMetrics3D.grasp_quality(j, obj,
                                                                          canny_quality_config[value_fc],
                                                                          vis=False)
                        good_grasp.append((j, value_fc, canny_quality))
                        good_count_perfect[ind_] += 1
                    break
        print('Object:{} GoodGrasp:{}'.format(object_name, good_count_perfect))

    object_name_len = len(object_name)
    object_name_ = str(object_name) + " " * (25 - object_name_len)
    if count == 0:
        good_grasp_rate = 0
    else:
        good_grasp_rate = len(good_grasp) / count
    print('Gripper:{} Object:{} Rate:{:.4f} {}/{}'.
          format(gripper_name, object_name_, good_grasp_rate, len(good_grasp), count))


if __name__ == '__main__':
    if len(sys.argv) > 1:
        filename_prefix = sys.argv[1]
    else:
        filename_prefix = "default"
    home_dir = '/home/tpatten'  # os.environ['HOME']
    file_dir = home_dir + "/Data/ycb_meshes_google/objects"
    file_list_all = get_file_name(file_dir)
    object_numbers = len(file_list_all)

    job_list = np.arange(object_numbers)
    job_list = list(job_list)
    pool_size = 1  # number of jobs run at the same time
    assert pool_size <= len(job_list)
    # Initialize pool
    pool = []
    for _ in range(pool_size):
        job_i = job_list.pop(0)
        pool.append(multiprocessing.Process(target=do_job, args=(job_i,)))
    [p.start() for p in pool]
    # refill
    while len(job_list) > 0:
        for ind, p in enumerate(pool):
            if not p.is_alive():
                pool.pop(ind)
                job_i = job_list.pop(0)
                p = multiprocessing.Process(target=do_job, args=(job_i,))
                p.start()
                pool.append(p)
                break
    print('All job done.')
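Each job above writes a pickle of (grasp, friction, canny) tuples alongside a parallel .npy matrix whose rows are the gripper configuration vector followed by the two quality scores. A minimal sketch of reading one of those .npy files back (the file name here is hypothetical, not produced by the script verbatim):

import numpy as np

grasps = np.load("./generated_grasps/default_obj_800.npy")  # hypothetical output file
configs = grasps[:, :-2]          # gripper configuration per grasp
score_friction = grasps[:, -2]    # friction coefficient bracket the grasp passed
score_canny = grasps[:, -1]       # robust Ferrari-Canny quality score
print(grasps.shape, configs.shape, float(score_friction.mean()))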
Chap10_Example10.13.py
from threading import *
from time import sleep


def display(num1, num2):
    print(f"{current_thread().name} thread started")
    sleep(1)
    mul = num1 * num2
    print(f"{current_thread().name} executing display function with value {mul}")


myt1 = Thread(target=display, name="MyChildThread1", args=(10, 20))
myt2 = Thread(target=display, name="MyChildThread2", args=(30, 40))
myt3 = Thread(target=display, name="MyChildThread3", args=(50, 60))
print("The total number of active threads before child thread start are: ", active_count())
myt1.start()
myt2.start()
myt3.start()
# Thread.isAlive() was removed in Python 3.9; is_alive() is the current spelling.
print(f"Is {myt1.name} alive: ", myt1.is_alive())
print(f"Is {myt2.name} alive: ", myt2.is_alive())
print(f"Is {myt3.name} alive: ", myt3.is_alive())
sleep(5)
print(f"Is {myt1.name} alive after MainThread sleep for 5 secs: ", myt1.is_alive())
print(f"Is {myt2.name} alive after MainThread sleep for 5 secs: ", myt2.is_alive())
print(f"Is {myt3.name} alive after MainThread sleep for 5 secs: ", myt3.is_alive())
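The example waits a fixed five seconds for the workers to finish. A minimal variation (a sketch, not part of the original example) uses join() so the main thread blocks exactly until each child completes, regardless of how long display() takes:

from threading import Thread, current_thread


def display(num1, num2):
    print(f"{current_thread().name}: {num1 * num2}")


threads = [Thread(target=display, name=f"MyChildThread{i}", args=(10 * i, 20 * i))
           for i in (1, 2, 3)]
for t in threads:
    t.start()
for t in threads:
    t.join()  # blocks until this child thread terminates
print("All child threads finished:", all(not t.is_alive() for t in threads))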
__init__.py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Implements context management so that nested/scoped contexts and threaded
contexts work properly and as expected.
"""
import collections
import functools
import logging
import platform
import string
import sys
import threading
import time

from ..timeout import Timeout


class _devnull(object):
    name = None
    def write(self, *a, **kw): pass
    def read(self, *a, **kw):  return ''
    def flush(self, *a, **kw): pass
    def close(self, *a, **kw): pass


class _defaultdict(dict):
    """
    Dictionary which loads missing keys from another dictionary.

    This is necessary because the ``default_factory`` method of
    :class:`collections.defaultdict` does not provide the key.

    Examples:

        >>> a = {'foo': 'bar'}
        >>> b = pwnlib.context._defaultdict(a)
        >>> b['foo']
        'bar'
        >>> 'foo' in b
        False
        >>> b['foo'] = 'baz'
        >>> b['foo']
        'baz'
        >>> del b['foo']
        >>> b['foo']
        'bar'

        >>> a = {'foo': 'bar'}
        >>> b = pwnlib.context._defaultdict(a)
        >>> b['baz'] #doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        KeyError: 'baz'
    """
    def __init__(self, default=None):
        super(_defaultdict, self).__init__()
        if default is None:
            default = {}
        self.default = default

    def __missing__(self, key):
        return self.default[key]


class _DictStack(object):
    """
    Manages a dictionary-like object, permitting saving and restoring from
    a stack of states via :func:`push` and :func:`pop`.

    The underlying object used as ``default`` must implement ``copy``,
    ``clear``, and ``update``.

    Examples:

        >>> t = pwnlib.context._DictStack(default={})
        >>> t['key'] = 'value'
        >>> t
        {'key': 'value'}
        >>> t.push()
        >>> t
        {'key': 'value'}
        >>> t['key'] = 'value2'
        >>> t
        {'key': 'value2'}
        >>> t.pop()
        >>> t
        {'key': 'value'}
    """
    def __init__(self, default):
        self._current = _defaultdict(default)
        self.__stack = []

    def push(self):
        self.__stack.append(self._current.copy())

    def pop(self):
        self._current.clear()
        self._current.update(self.__stack.pop())

    def copy(self):
        return self._current.copy()

    # Pass-through container emulation routines
    def __len__(self):           return self._current.__len__()
    def __delitem__(self, k):    return self._current.__delitem__(k)
    def __getitem__(self, k):    return self._current.__getitem__(k)
    def __setitem__(self, k, v): return self._current.__setitem__(k, v)
    def __contains__(self, k):   return self._current.__contains__(k)
    def __iter__(self):          return self._current.__iter__()
    def __repr__(self):          return self._current.__repr__()
    def __eq__(self, other):     return self._current.__eq__(other)

    # Required for keyword expansion operator ** to work
    def keys(self):   return self._current.keys()
    def values(self): return self._current.values()
    def items(self):  return self._current.items()


class _Tls_DictStack(threading.local, _DictStack):
    """
    Per-thread implementation of :class:`_DictStack`.

    Examples:

        >>> t = pwnlib.context._Tls_DictStack({})
        >>> t['key'] = 'value'
        >>> print t
        {'key': 'value'}
        >>> def p(): print t
        >>> thread = threading.Thread(target=p)
        >>> _ = (thread.start(), thread.join())
        {}
    """
    pass


def _validator(validator):
    """
    Validator that is tightly coupled to the implementation of the classes
    here.

    This expects that the object has a ``._tls`` property which is of type
    :class:`_DictStack`.
    """
    name = validator.__name__
    doc = validator.__doc__

    def fget(self):
        return self._tls[name]

    def fset(self, val):
        self._tls[name] = validator(self, val)

    def fdel(self):
        self._tls._current.pop(name, None)

    return property(fget, fset, fdel, doc)


class Thread(threading.Thread):
    """
    Instantiates a context-aware thread, which inherits its context when it is
    instantiated.

    The class can be accessed both on the context module as
    `pwnlib.context.Thread` and on the context singleton object inside the
    context module as `pwnlib.context.context.Thread`.

    Threads created by using the native :class:`threading.Thread` will have a
    clean (default) context.

    Regardless of the mechanism used to create any thread, the context is
    de-coupled from the parent thread, so changes do not cascade to child or
    parent.

    Saves a copy of the context when instantiated (at ``__init__``) and
    updates the new thread's context before passing control to the user code
    via ``run`` or ``target=``.

    Examples:

        >>> context.clear()
        >>> context.update(arch='arm')
        >>> def p():
        ...     print context.arch
        ...     context.arch = 'mips'
        ...     print context.arch
        >>> # Note that a normal Thread starts with a clean context
        >>> # (i386 is the default architecture)
        >>> t = threading.Thread(target=p)
        >>> _=(t.start(), t.join())
        i386
        mips
        >>> # Note that the main Thread's context is unchanged
        >>> print context.arch
        arm
        >>> # Note that a context-aware Thread receives a copy of the context
        >>> t = pwnlib.context.Thread(target=p)
        >>> _=(t.start(), t.join())
        arm
        mips
        >>> # Again, the main thread is unchanged
        >>> print context.arch
        arm

    Implementation Details:

        This class is implemented by hooking the private function
        :func:`threading.Thread._Thread__bootstrap`, which is called before
        passing control to :func:`threading.Thread.run`.

        This could be done by overriding ``run`` itself, but we would have to
        ensure that all uses of the class would only ever use the keyword
        ``target=`` for ``__init__``, or that all subclasses invoke
        ``super(Subclass.self).set_up_context()`` or similar.
    """
    def __init__(self, *args, **kwargs):
        super(Thread, self).__init__(*args, **kwargs)
        self.old = context.copy()

    def __bootstrap(self):
        """
        Implementation Details:
            This only works because the class is named ``Thread``.
            If its name is changed, we have to implement this hook
            differently.
        """
        context.update(**self.old)
        super(Thread, self).__bootstrap()


def _longest(d):
    """
    Returns an OrderedDict with the contents of the input dictionary ``d``
    sorted by the length of the keys, in descending order.

    This is useful for performing substring matching via ``str.startswith``,
    as it ensures the most complete match will be found.

    >>> data = {'a': 1, 'bb': 2, 'ccc': 3}
    >>> _longest(data) == data
    True
    >>> for i in _longest(data): print i
    ccc
    bb
    a
    """
    return collections.OrderedDict((k, d[k]) for k in sorted(d, key=len, reverse=True))


class TlsProperty(object):
    def __get__(self, obj, objtype=None):
        return obj._tls


class ContextType(object):
    r"""
    Class for specifying information about the target machine.
    Intended for use as a pseudo-singleton through the global
    variable ``pwnlib.context.context``, available via
    ``from pwn import *`` as ``context``.

    The context is usually specified at the top of the Python file for
    clarity. ::

        #!/usr/bin/env python
        context.update(arch='i386', os='linux')

    Currently supported properties and their defaults are listed below.
    The defaults are inherited from :data:`pwnlib.context.ContextType.defaults`.

    Additionally, the context is thread-aware when using
    :class:`pwnlib.context.Thread` instead of :class:`threading.Thread`
    (all internal ``binjitsu`` threads use the former).

    The context is also scope-aware by using the ``with`` keyword.

    Examples:

        >>> context.clear()
        >>> context.update(os='linux') # doctest: +ELLIPSIS
        >>> context.os == 'linux'
        True
        >>> context.arch = 'arm'
        >>> vars(context) == {'arch': 'arm', 'bits': 32, 'endian': 'little', 'os': 'linux'}
        True
        >>> context.endian
        'little'
        >>> context.bits
        32
        >>> def nop():
        ...     print pwnlib.asm.asm('nop').encode('hex')
        >>> nop()
        00f020e3
        >>> with context.local(arch = 'i386'):
        ...     nop()
        90
        >>> from pwnlib.context import Thread as PwnThread
        >>> from threading import Thread as NormalThread
        >>> with context.local(arch = 'mips'):
        ...     pwnthread = PwnThread(target=nop)
        ...     thread = NormalThread(target=nop)
        >>> # Normal thread uses the default value for arch, 'i386'
        >>> _=(thread.start(), thread.join())
        90
        >>> # Pwnthread uses the correct context from creation-time
        >>> _=(pwnthread.start(), pwnthread.join())
        00000000
        >>> nop()
        00f020e3
    """

    #
    # Use of 'slots' is a heavy-handed way to prevent accidents
    # like 'context.architecture=' instead of 'context.arch='.
    #
    # Setting any properties on a ContextType object will throw an
    # exception.
    #
    __slots__ = '_tls',

    #: Default values for :class:`pwnlib.context.ContextType`
    defaults = {
        'arch': 'i386',
        'binary': None,
        'bits': 32,
        'endian': 'little',
        'log_level': logging.INFO,
        'log_file': _devnull(),
        'newline': '\n',
        'os': 'linux',
        'signed': False,
        'timeout': Timeout.maximum,
    }

    #: Valid values for :meth:`pwnlib.context.ContextType.os`
    oses = sorted(('linux', 'freebsd', 'windows'))

    big_32 = {'endian': 'big', 'bits': 32}
    big_64 = {'endian': 'big', 'bits': 64}
    little_8 = {'endian': 'little', 'bits': 8}
    little_16 = {'endian': 'little', 'bits': 16}
    little_32 = {'endian': 'little', 'bits': 32}
    little_64 = {'endian': 'little', 'bits': 64}

    #: Keys are valid values for :meth:`pwnlib.context.ContextType.arch`.
    #: Values are defaults which are set when
    #: :attr:`pwnlib.context.ContextType.arch` is set
    architectures = _longest({
        'aarch64': little_64,
        'alpha': little_64,
        'avr': little_8,
        'amd64': little_64,
        'arm': little_32,
        'cris': little_32,
        'i386': little_32,
        'ia64': big_64,
        'm68k': big_32,
        'mips': little_32,
        'mips64': little_64,
        'msp430': little_16,
        'powerpc': big_32,
        'powerpc64': big_64,
        's390': big_32,
        'sparc': big_32,
        'sparc64': big_64,
        'thumb': little_32,
        'vax': little_32,
    })

    #: Valid values for :attr:`endian`
    endiannesses = _longest({
        'be': 'big',
        'eb': 'big',
        'big': 'big',
        'le': 'little',
        'el': 'little',
        'little': 'little'
    })

    #: Valid string values for :attr:`signed`
    signednesses = {
        'unsigned': False,
        'no': False,
        'yes': True,
        'signed': True
    }

    valid_signed = sorted(signednesses)

    def __init__(self, **kwargs):
        """
        Initialize the ContextType structure.

        All keyword arguments are passed to :func:`update`.
        """
        self._tls = _Tls_DictStack(_defaultdict(ContextType.defaults))
        self.update(**kwargs)

    def copy(self):
        """copy() -> dict
        Returns a copy of the current context as a dictionary.

        Examples:

            >>> context.clear()
            >>> context.os = 'linux'
            >>> vars(context) == {'os': 'linux'}
            True
        """
        return self._tls.copy()

    @property
    def __dict__(self):
        return self.copy()

    def update(self, *args, **kwargs):
        """
        Convenience function, which is shorthand for setting multiple
        variables at once.

        It is a simple shorthand such that::

            context.update(os = 'linux', arch = 'arm', ...)

        is equivalent to::

            context.os = 'linux'
            context.arch = 'arm'
            ...

        The following syntax is also valid::

            context.update({'os': 'linux', 'arch': 'arm'})

        Arguments:
            kwargs: Variables to be assigned in the environment.

        Examples:

            >>> context.clear()
            >>> context.update(arch = 'i386', os = 'linux')
            >>> context.arch, context.os
            ('i386', 'linux')
        """
        for arg in args:
            self.update(**arg)

        for k, v in kwargs.items():
            setattr(self, k, v)

    def __repr__(self):
        v = sorted("%s = %r" % (k, v) for k, v in self._tls._current.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(v))

    def local(self, **kwargs):
        """local(**kwargs) -> context manager

        Create a context manager for use with the ``with`` statement.

        For more information, see the example below or PEP 343.

        Arguments:
            kwargs: Variables to be assigned in the new environment.

        Returns:
            ContextType manager for managing the old and new environment.

        Examples:

            >>> context.clear()
            >>> context.timeout = 1
            >>> context.timeout == 1
            True
            >>> print context.timeout
            1.0
            >>> with context.local(timeout = 2):
            ...     print context.timeout
            ...     context.timeout = 3
            ...     print context.timeout
            2.0
            3.0
            >>> print context.timeout
            1.0
        """
        class LocalContext(object):
            def __enter__(a):
                self._tls.push()
                self.update(**{k: v for k, v in kwargs.items() if v is not None})
                return self

            def __exit__(a, *b, **c):
                self._tls.pop()

        return LocalContext()

    def clear(self):
        """
        Clears the contents of the context.
        All values are set to their defaults.

        Examples:

            >>> # Default value
            >>> context.arch == 'i386'
            True
            >>> context.arch = 'arm'
            >>> context.arch == 'i386'
            False
            >>> context.clear()
            >>> context.arch == 'i386'
            True
        """
        self._tls._current.clear()

    @property
    def native(self):
        arch = context.arch
        with context.local(arch = platform.machine()):
            platform_arch = context.arch

            if arch in ('i386', 'amd64') and platform_arch in ('i386', 'amd64'):
                return True

            return arch == platform_arch

    @_validator
    def arch(self, arch):
        """
        Target machine architecture.

        Allowed values are listed in
        :attr:`pwnlib.context.ContextType.architectures`.

        Side Effects:

            If an architecture is specified which also implies additional
            attributes (e.g. 'amd64' implies 64-bit words, 'powerpc' implies
            big-endian), these attributes will be set on the context if a
            user has not already set a value.

            The following properties may be modified.

            - :attr:`bits`
            - :attr:`endian`

        Raises:
            AttributeError: An invalid architecture was specified

        Examples:

            >>> context.clear()
            >>> context.arch == 'i386' # Default architecture
            True

            >>> context.arch = 'mips'
            >>> context.arch == 'mips'
            True

            >>> context.arch = 'doge' #doctest: +ELLIPSIS
            Traceback (most recent call last):
            ...
            AttributeError: arch must be one of ['aarch64', ..., 'thumb']

            >>> context.arch = 'ppc'
            >>> context.arch == 'powerpc' # Aliased architecture
            True

            >>> context.clear()
            >>> context.bits == 32 # Default value
            True
            >>> context.arch = 'amd64'
            >>> context.bits == 64 # New value
            True

            Note that expressly setting :attr:`bits` means that we use that
            value instead of the default

            >>> context.clear()
            >>> context.bits = 32
            >>> context.arch = 'amd64'
            >>> context.bits == 32
            True

            Setting the architecture can override the defaults for both
            :attr:`endian` and :attr:`bits`

            >>> context.clear()
            >>> context.arch = 'powerpc64'
            >>> vars(context) == {'arch': 'powerpc64', 'bits': 64, 'endian': 'big'}
            True
        """
        # Lowercase, remove everything non-alphanumeric
        arch = arch.lower()
        arch = arch.replace(string.punctuation, '')

        # Attempt to perform convenience and legacy compatibility
        # transformations.
        transform = {'x86': 'i386', 'ppc': 'powerpc', 'x86_64': 'amd64'}
        for k, v in transform.items():
            if arch.startswith(k):
                arch = arch.replace(k, v, 1)

        try:
            defaults = ContextType.architectures[arch]
        except KeyError:
            raise AttributeError('AttributeError: arch must be one of %r'
                                 % sorted(ContextType.architectures))

        for k, v in ContextType.architectures[arch].items():
            if k not in self._tls:
                self._tls[k] = v

        return arch

    @_validator
    def bits(self, bits):
        """
        Target machine word size, in bits (i.e. the size of general purpose
        registers).

        The default value is ``32``, but changes according to :attr:`arch`.

        Examples:

            >>> context.clear()
            >>> context.bits == 32
            True
            >>> context.bits = 64
            >>> context.bits == 64
            True
            >>> context.bits = -1 #doctest: +ELLIPSIS
            Traceback (most recent call last):
            ...
            AttributeError: bits must be >= 0 (-1)
        """
        bits = int(bits)

        if bits <= 0:
            raise AttributeError("bits must be >= 0 (%r)" % bits)

        return bits

    @_validator
    def binary(self, binary):
        """
        Infer target architecture, bit-width, and endianness from a binary
        file.  Data type is a :class:`pwnlib.elf.ELF` object.

        Examples:

            >>> context.clear()
            >>> context.arch, context.bits
            ('i386', 32)
            >>> context.binary = '/bin/bash'
            >>> context.arch, context.bits
            ('amd64', 64)
            >>> context.binary
            ELF('/bin/bash')
        """
        # Cyclic imports... sorry Idolf.
        from ..elf import ELF

        if not isinstance(binary, ELF):
            binary = ELF(binary)

        self.arch = binary.arch
        self.bits = binary.bits
        self.endian = binary.endian

        return binary

    @property
    def bytes(self):
        """
        Target machine word size, in bytes (i.e. the size of general purpose
        registers).

        This is a convenience wrapper around ``bits / 8``.

        Examples:

            >>> context.bytes = 1
            >>> context.bits == 8
            True

            >>> context.bytes = 0 #doctest: +ELLIPSIS
            Traceback (most recent call last):
            ...
            AttributeError: bits must be >= 0 (0)
        """
        return self.bits / 8

    @bytes.setter
    def bytes(self, value):
        self.bits = value * 8

    @_validator
    def endian(self, endianness):
        """
        Endianness of the target machine.

        The default value is ``'little'``, but changes according to
        :attr:`arch`.

        Raises:
            AttributeError: An invalid endianness was provided

        Examples:

            >>> context.clear()
            >>> context.endian == 'little'
            True

            >>> context.endian = 'big'
            >>> context.endian
            'big'

            >>> context.endian = 'be'
            >>> context.endian == 'big'
            True

            >>> context.endian = 'foobar' #doctest: +ELLIPSIS
            Traceback (most recent call last):
            ...
            AttributeError: endian must be one of ['be', 'big', 'eb', 'el', 'le', 'little']
        """
        endian = endianness.lower()

        if endian not in ContextType.endiannesses:
            raise AttributeError("endian must be one of %r"
                                 % sorted(ContextType.endiannesses))

        return ContextType.endiannesses[endian]

    @_validator
    def log_level(self, value):
        """
        Sets the verbosity of ``binjitsu`` logging mechanism.

        More specifically it controls the filtering of messages that happens
        inside the handler for logging to the screen.  So if you want to e.g.
        log all messages to a file, then this attribute makes no difference
        to you.

        Valid values are specified by the standard Python ``logging`` module.

        Default value is set to ``INFO``.

        Examples:

            >>> context.log_level = 'error'
            >>> context.log_level == logging.ERROR
            True
            >>> context.log_level = 10
            >>> context.log_level = 'foobar' #doctest: +ELLIPSIS
            Traceback (most recent call last):
            ...
            AttributeError: log_level must be an integer or one of ['CRITICAL', 'DEBUG', 'ERROR', 'INFO', 'NOTSET', 'WARN', 'WARNING']
        """
        # If it can be converted into an int, success
        try:
            return int(value)
        except ValueError:
            pass

        # If it is defined in the logging module, success
        try:
            return getattr(logging, value.upper())
        except AttributeError:
            pass

        # Otherwise, fail
        level_names = filter(lambda x: isinstance(x, str), logging._levelNames)
        permitted = sorted(level_names)
        raise AttributeError('log_level must be an integer or one of %r' % permitted)

    @_validator
    def log_file(self, value):
        r"""
        Sets the target file for all logging output.

        Works in a similar fashion to :attr:`log_level`.

        Examples:

            >>> context.log_file = 'foo.txt' #doctest: +ELLIPSIS
            >>> log.debug('Hello!') #doctest: +ELLIPSIS
            >>> with context.local(log_level='ERROR'): #doctest: +ELLIPSIS
            ...     log.info('Hello again!')
            >>> with context.local(log_file='bar.txt'):
            ...     log.debug('Hello from bar!')
            >>> log.info('Hello from foo!')
            >>> file('foo.txt').readlines()[-3] #doctest: +ELLIPSIS
            '...:DEBUG:...:Hello!\n'
            >>> file('foo.txt').readlines()[-2] #doctest: +ELLIPSIS
            '...:INFO:...:Hello again!\n'
            >>> file('foo.txt').readlines()[-1] #doctest: +ELLIPSIS
            '...:INFO:...:Hello from foo!\n'
            >>> file('bar.txt').readlines()[-1] #doctest: +ELLIPSIS
            '...:DEBUG:...:Hello from bar!\n'
        """
        if isinstance(value, (str, unicode)):
            modes = ('w', 'wb', 'a', 'ab')
            # check if mode was specified as "[value],[mode]"
            if ',' not in value:
                value += ',a'
            filename, mode = value.rsplit(',', 1)
            value = open(filename, mode)
        elif not isinstance(value, (file)):
            raise AttributeError('log_file must be a file')

        iso_8601 = '%Y-%m-%dT%H:%M:%S'
        lines = [
            '=' * 78,
            ' Started at %s ' % time.strftime(iso_8601),
            ' sys.argv = [',
        ]
        for arg in sys.argv:
            lines.append(' %r,' % arg)
        lines.append(' ]')
        lines.append('=' * 78)
        for line in lines:
            value.write('=%-78s=\n' % line)
        value.flush()
        return value

    @_validator
    def os(self, os):
        """
        Operating system of the target machine.

        The default value is ``linux``.

        Allowed values are listed in :attr:`pwnlib.context.ContextType.oses`.

        Examples:

            >>> context.os = 'linux'
            >>> context.os = 'foobar' #doctest: +ELLIPSIS
            Traceback (most recent call last):
            ...
            AttributeError: os must be one of ['freebsd', 'linux', 'windows']
        """
        os = os.lower()

        if os not in ContextType.oses:
            raise AttributeError("os must be one of %r" % sorted(ContextType.oses))

        return os

    @_validator
    def signed(self, signed):
        """
        Signed-ness for packing operations when it's not explicitly set.

        Can be set to any non-string truthy value, or the specific string
        values ``'signed'`` or ``'unsigned'`` which are converted into
        ``True`` and ``False`` correspondingly.

        Examples:

            >>> context.signed
            False
            >>> context.signed = 1
            >>> context.signed
            True
            >>> context.signed = 'signed'
            >>> context.signed
            True
            >>> context.signed = 'unsigned'
            >>> context.signed
            False
            >>> context.signed = 'foobar' #doctest: +ELLIPSIS
            Traceback (most recent call last):
            ...
            AttributeError: signed must be one of ['no', 'signed', 'unsigned', 'yes'] or a non-string truthy value
        """
        try:
            signed = ContextType.signednesses[signed]
        except KeyError:
            pass

        if isinstance(signed, str):
            raise AttributeError('signed must be one of %r or a non-string truthy value'
                                 % sorted(ContextType.signednesses))

        return bool(signed)

    @_validator
    def timeout(self, value=Timeout.default):
        """
        Default amount of time to wait for a blocking operation before it
        times out, specified in seconds.

        The default value is to have an infinite timeout.

        See :class:`pwnlib.timeout.Timeout` for additional information on
        valid values.
        """
        return Timeout(value).timeout

    #*************************************************************************
    #                               ALIASES
    #*************************************************************************
    #
    # These fields are aliases for fields defined above, either for
    # convenience or compatibility.
    #
    #*************************************************************************

    def __call__(self, **kwargs):
        """
        Alias for :meth:`pwnlib.context.ContextType.update`
        """
        return self.update(**kwargs)

    def reset_local(self):
        """
        Deprecated.  Use :meth:`clear`.
        """
        self.clear()

    @property
    def endianness(self):
        """
        Legacy alias for :attr:`endian`.

        Examples:

            >>> context.endian == context.endianness
            True
        """
        return self.endian

    @endianness.setter
    def endianness(self, value):
        self.endian = value

    @property
    def sign(self):
        """
        Alias for :attr:`signed`
        """
        return self.signed

    @sign.setter
    def sign(self, value):
        self.signed = value

    @property
    def signedness(self):
        """
        Alias for :attr:`signed`
        """
        return self.signed

    @signedness.setter
    def signedness(self, value):
        self.signed = value

    @property
    def word_size(self):
        """
        Alias for :attr:`bits`
        """
        return self.bits

    @word_size.setter
    def word_size(self, value):
        self.bits = value

    Thread = Thread


#: Global ``context`` object, used to store commonly-used binjitsu settings.
#:
#: In most cases, the context is used to infer default variable values.
#: For example, :meth:`pwnlib.asm.asm` can take an ``os`` parameter as a
#: keyword argument.  If it is not supplied, the ``os`` specified by
#: ``context`` is used instead.
#:
#: Consider it a shorthand to passing ``os=`` and ``arch=`` to every single
#: function call.
context = ContextType()


def LocalContext(function):
    """
    Wraps the specified function in a ``context.local()`` block, using kwargs.

    Example:

        >>> @LocalContext
        ... def printArch():
        ...     print(context.arch)
        >>> printArch()
        i386
        >>> printArch(arch='arm')
        arm
    """
    @functools.wraps(function)
    def setter(*a, **kw):
        with context.local(**{k: kw.pop(k) for k, v in kw.items()
                              if isinstance(getattr(ContextType, k, None), property)}):
            return function(*a, **kw)
    return setter
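The core mechanism of this module is the combination of the `_validator` decorator (a validator function turned into a property) with `_Tls_DictStack` (storage that is per-thread and falls back to class defaults). A standalone Python 3 sketch of that pattern follows; the names `_TlsStore`, `_validated`, and `MiniContext` are hypothetical and exist only to illustrate the idea outside pwnlib:

import threading


class _TlsStore(threading.local):
    # threading.local re-runs __init__ per thread, so each thread
    # gets its own independent `data` dict
    def __init__(self):
        self.data = {}


def _validated(validator):
    name = validator.__name__

    def fget(self):
        # fall back to the class-level default when this thread never set it
        return self._tls.data.get(name, self.defaults[name])

    def fset(self, val):
        self._tls.data[name] = validator(self, val)

    return property(fget, fset, None, validator.__doc__)


class MiniContext(object):
    defaults = {'bits': 32}

    def __init__(self):
        self._tls = _TlsStore()

    @_validated
    def bits(self, bits):
        """Word size in bits; must be positive."""
        bits = int(bits)
        if bits <= 0:
            raise AttributeError("bits must be > 0 (%r)" % bits)
        return bits


ctx = MiniContext()
ctx.bits = 64
t = threading.Thread(target=lambda: print(ctx.bits))  # prints 32: fresh thread sees the default
t.start(); t.join()
print(ctx.bits)  # 64 in the main thread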
test_api_gunicorn_scheduler.py
import json
import os
import shutil
import sys
import time
import unittest
from multiprocessing import Process

import requests

from app import api
from app import configs
from app import schedulerv2
from app import sync
from common import posts

EC = configs.EnjoliverConfig(importer=__file__)
EC.api_uri = "http://127.0.0.1:5000"


class TestAPIGunicornScheduler(unittest.TestCase):
    p_matchbox = Process
    p_api = Process

    inte_path = "%s" % os.path.dirname(__file__)
    dbs_path = "%s/dbs" % inte_path
    tests_path = "%s" % os.path.dirname(inte_path)
    app_path = os.path.dirname(tests_path)
    project_path = os.path.dirname(app_path)
    matchbox_path = "%s/matchbox" % project_path
    assets_path = "%s/matchbox/assets" % project_path

    test_matchbox_path = "%s/test_matchbox" % tests_path

    api_discovery = "%s/discovery" % EC.api_uri

    @staticmethod
    def process_target_matchbox():
        os.environ["ENJOLIVER_MATCHBOX_PATH"] = TestAPIGunicornScheduler.test_matchbox_path
        os.environ["ENJOLIVER_MATCHBOX_ASSETS"] = TestAPIGunicornScheduler.assets_path
        cmd = [
            "%s" % sys.executable,
            "%s/manage.py" % TestAPIGunicornScheduler.project_path,
            "matchbox",
        ]
        print("PID  -> %s\n"
              "exec -> %s\n" % (os.getpid(), " ".join(cmd)))
        sys.stdout.flush()
        os.execve(cmd[0], cmd, os.environ)

    @staticmethod
    def clean_sandbox():
        dirs = ["%s/%s" % (TestAPIGunicornScheduler.test_matchbox_path, k)
                for k in ("profiles", "groups")]
        for d in dirs:
            for f in os.listdir(d):
                if ".json" in f:
                    print("\r-> remove %s\n\r" % f)
                    os.remove("%s/%s" % (d, f))

    @staticmethod
    def process_target_api():
        cmd = [
            "%s" % sys.executable,
            "%s/manage.py" % TestAPIGunicornScheduler.project_path,
            "gunicorn",
        ]
        os.execve(cmd[0], cmd, os.environ)

    @classmethod
    def setUpClass(cls):
        time.sleep(0.1)
        shutil.rmtree(EC.ignition_journal_dir, ignore_errors=True)
        cls.clean_sandbox()
        smart = api.SmartDatabaseClient(EC.db_uri)
        api.SMART = smart
        api.SMART.create_base()

        cls.p_matchbox = Process(target=TestAPIGunicornScheduler.process_target_matchbox)
        cls.p_api = Process(target=TestAPIGunicornScheduler.process_target_api)
        print("PPID -> %s\n" % os.getpid())
        cls.p_matchbox.start()
        assert cls.p_matchbox.is_alive() is True
        cls.p_api.start()
        assert cls.p_api.is_alive() is True

        cls.matchbox_running(EC.matchbox_uri, cls.p_matchbox)
        cls.api_running(EC.api_uri, cls.p_api)

    @classmethod
    def tearDownClass(cls):
        print("TERM -> %d\n" % cls.p_matchbox.pid)
        sys.stdout.flush()
        cls.p_matchbox.terminate()
        cls.p_matchbox.join(timeout=5)
        cls.p_api.terminate()
        cls.p_api.join(timeout=5)
        time.sleep(0.2)

    @staticmethod
    def matchbox_running(matchbox_endpoint, p_matchbox):
        response_body = ""
        response_code = 404
        for i in range(10):
            assert p_matchbox.is_alive() is True
            try:
                request = requests.get(matchbox_endpoint)
                response_body = request.content
                response_code = request.status_code
                request.close()
                break
            except requests.exceptions.ConnectionError:
                pass
            time.sleep(0.2)

        print(response_body)
        assert b"matchbox\n" == response_body
        assert 200 == response_code

    @staticmethod
    def api_running(api_endpoint, p_api):
        response_code = 404
        for i in range(10):
            assert p_api.is_alive() is True
            try:
                request = requests.get(api_endpoint)
                response_code = request.status_code
                request.close()
                break
            except requests.exceptions.ConnectionError:
                pass
            time.sleep(0.2)

        assert 200 == response_code

    def setUp(self):
        self.assertTrue(self.p_matchbox.is_alive())
        self.assertTrue(self.p_api.is_alive())
        self.api_healthz()

    def api_healthz(self):
        expect = {
            u'flask': True,
            u'global': False,
            u'db': True,
            'discovery': {'ignition': False, 'ipxe': False},
            u'matchbox': {
                u'/': True,
                u'/boot.ipxe': True,
                u'/boot.ipxe.0': True,
                u'/assets': True,
                u"/metadata": True
            }}
        request = requests.get("%s/healthz" % EC.api_uri)
        response_body = request.content
        response_code = request.status_code
        request.close()
        self.assertEqual(json.loads(response_body.decode()), expect)
        self.assertEqual(503, response_code)


# @unittest.skip("skip")
class TestEtcdMemberKubernetesControlPlane1(TestAPIGunicornScheduler):
    def test_01(self):
        r = requests.post(self.api_discovery, data=json.dumps(posts.M01))
        self.assertEqual(r.status_code, 200)
        sch = schedulerv2.EtcdMemberKubernetesControlPlane(EC.api_uri)
        sch.expected_nb = 1
        self.assertTrue(sch.apply())
        self.assertTrue(sch.apply())


class TestEtcdMemberKubernetesControlPlane2(TestAPIGunicornScheduler):
    def test_02(self):
        r = requests.post(self.api_discovery, data=json.dumps(posts.M01))
        r.close()
        self.assertEqual(r.status_code, 200)
        sch = schedulerv2.EtcdMemberKubernetesControlPlane(EC.api_uri)
        self.assertFalse(sch.apply())
        self.assertFalse(sch.apply())
        r = requests.post(self.api_discovery, data=json.dumps(posts.M02))
        r.close()
        self.assertFalse(sch.apply())
        r = requests.post(self.api_discovery, data=json.dumps(posts.M03))
        r.close()
        self.assertTrue(sch.apply())
        s = sync.ConfigSyncSchedules(
            EC.api_uri,
            self.test_matchbox_path,
            ignition_dict={
                "etcd_member_kubernetes_control_plane": "inte-testapigunicornscheduler-etcd-k8s-cp",
                "kubernetes_nodes": "inte-testapigunicornscheduler-etcd-k8s-cp",
            }
        )
        s.apply()


class TestEtcdMemberKubernetesControlPlane3(TestAPIGunicornScheduler):
    def test_03(self):
        r = requests.post(self.api_discovery, data=json.dumps(posts.M01))
        r.close()
        self.assertEqual(r.status_code, 200)
        sch = schedulerv2.EtcdMemberKubernetesControlPlane(EC.api_uri)
        self.assertFalse(sch.apply())
        self.assertFalse(sch.apply())
        r = requests.post(self.api_discovery, data=json.dumps(posts.M02))
        r.close()
        self.assertFalse(sch.apply())
        r = requests.post(self.api_discovery, data=json.dumps(posts.M03))
        r.close()
        self.assertTrue(sch.apply())
        sch_no = schedulerv2.KubernetesNode(EC.api_uri, True)
        self.assertEqual(0, sch_no.apply())
        r = requests.post(self.api_discovery, data=json.dumps(posts.M04))
        r.close()
        self.assertEqual(1, sch_no.apply())
        s = sync.ConfigSyncSchedules(
            EC.api_uri,
            self.test_matchbox_path,
            ignition_dict={
                "etcd_member_kubernetes_control_plane": "inte-testapigunicornscheduler-etcd-k8s-cp",
                "kubernetes_nodes": "inte-testapigunicornscheduler-etcd-k8s-cp",
            },
        )
        s.apply()


class TestEtcdMemberKubernetesControlPlane4(TestAPIGunicornScheduler):
    def test_04(self):
        for p in posts.ALL:
            r = requests.post(self.api_discovery, data=json.dumps(p))
            self.assertEqual(r.status_code, 200)
            r.close()
        sch_no = schedulerv2.KubernetesNode(EC.api_uri, True)
        self.assertEqual(len(posts.ALL) - schedulerv2.EtcdMemberKubernetesControlPlane.expected_nb,
                         sch_no.apply())
        s = sync.ConfigSyncSchedules(
            EC.api_uri,
            self.test_matchbox_path,
            ignition_dict={
                "etcd_member_kubernetes_control_plane": "inte-testapigunicornscheduler-etcd-k8s-cp",
                "kubernetes_nodes": "inte-testapigunicornscheduler-etcd-k8s-cp",
            },
            extra_selector_dict={"os": "installed"},
        )
        s.apply()
notify.py
#!/usr/bin/env python3
# _*_ coding:utf-8 _*_
import base64
import hashlib
import hmac
import json
import os
import re
import threading
import time
import urllib.parse

import requests

# Keep a reference to the built-in print and a lock shared by all threads
_print = print
mutex = threading.Lock()


def print(text, *args, **kw):
    """
    Serialize output so that multiple threads printing at the same time
    do not interleave their messages.
    """
    with mutex:
        _print(text, *args, **kw)


# Notification services
# fmt: off
push_config = {
    'HITOKOTO': False,                  # enable hitokoto (a random quote appended to messages)

    'BARK_PUSH': '',                    # bark IP or device key, e.g. https://api.day.app/DxHcxxxxxRxxxxxxcm/
    'BARK_ARCHIVE': '',                 # whether bark pushes are archived
    'BARK_GROUP': '',                   # bark push group
    'BARK_SOUND': '',                   # bark push sound

    'CONSOLE': True,                    # console output

    'DD_BOT_SECRET': '',                # DingTalk bot DD_BOT_SECRET
    'DD_BOT_TOKEN': '',                 # DingTalk bot DD_BOT_TOKEN

    'FSKEY': '',                        # Feishu (Lark) bot FSKEY

    'GOBOT_URL': '',                    # go-cqhttp
                                        # push to a personal QQ: http://127.0.0.1/send_private_msg
                                        # push to a group:       http://127.0.0.1/send_group_msg
    'GOBOT_QQ': '',                     # go-cqhttp target group or user
                                        # with /send_private_msg set user_id=<personal QQ>
                                        # with /send_group_msg set group_id=<QQ group>
    'GOBOT_TOKEN': '',                  # go-cqhttp access_token

    'IGOT_PUSH_KEY': '',                # iGot aggregated-push IGOT_PUSH_KEY

    'PUSH_KEY': '',                     # ServerChan PUSH_KEY, compatible with both the legacy and Turbo versions

    'PUSH_PLUS_TOKEN': '',              # push+ WeChat push user token
    'PUSH_PLUS_USER': '',               # push+ WeChat push group code

    'QMSG_KEY': '',                     # Qmsg QMSG_KEY
    'QMSG_TYPE': '',                    # Qmsg QMSG_TYPE

    'QYWX_AM': '',                      # WeCom (WeChat Work) application
    'QYWX_KEY': '',                     # WeCom bot

    'TG_BOT_TOKEN': '',                 # Telegram bot TG_BOT_TOKEN, e.g. 1407203283:AAG9rt-6RDaaX0HBLZQq0laNOh898iFYaRQ
    'TG_USER_ID': '',                   # Telegram bot TG_USER_ID, e.g. 1434078534
    'TG_API_HOST': '',                  # Telegram proxy API
    'TG_PROXY_AUTH': '',                # Telegram proxy auth parameters
    'TG_PROXY_HOST': '',                # Telegram bot TG_PROXY_HOST
    'TG_PROXY_PORT': '',                # Telegram bot TG_PROXY_PORT
}
notify_function = []
# fmt: on

# First read panel variables or GitHub Actions environment variables
for k in push_config:
    if os.getenv(k):
        v = os.getenv(k)
        push_config[k] = v


def bark(title: str, content: str) -> None:
    """
    Push a message via bark.
    """
    if not push_config.get("BARK_PUSH"):
        print("bark: BARK_PUSH is not set!\nPush cancelled")
        return
    print("bark service starting")

    if push_config.get("BARK_PUSH").startswith("http"):
        url = f'{push_config.get("BARK_PUSH")}/{urllib.parse.quote_plus(title)}/{urllib.parse.quote_plus(content)}'
    else:
        url = f'https://api.day.app/{push_config.get("BARK_PUSH")}/{urllib.parse.quote_plus(title)}/{urllib.parse.quote_plus(content)}'

    bark_params = {
        "BARK_ARCHIVE": "isArchive",
        "BARK_GROUP": "group",
        "BARK_SOUND": "sound",
    }
    params = ""
    for pair in filter(
        lambda pairs: pairs[0].startswith("BARK_")
        and pairs[0] != "BARK_PUSH"
        and pairs[1]
        and bark_params.get(pairs[0]),
        push_config.items(),
    ):
        params += f"{bark_params.get(pair[0])}={pair[1]}&"
    if params:
        url = url + "?" + params.rstrip("&")
    response = requests.get(url).json()

    if response["code"] == 200:
        print("bark push succeeded!")
    else:
        print("bark push failed!")


def console(title: str, content: str) -> None:
    """
    Push a message to the console.
    """
    print(f"{title}\n\n{content}")


def dingding_bot(title: str, content: str) -> None:
    """
    Push a message via a DingTalk bot.
    """
    if not push_config.get("DD_BOT_SECRET") or not push_config.get("DD_BOT_TOKEN"):
        print("DingTalk bot: DD_BOT_SECRET or DD_BOT_TOKEN is not set!\nPush cancelled")
        return
    print("DingTalk bot service starting")

    timestamp = str(round(time.time() * 1000))
    secret_enc = push_config.get("DD_BOT_SECRET").encode("utf-8")
    string_to_sign = "{}\n{}".format(timestamp, push_config.get("DD_BOT_SECRET"))
    string_to_sign_enc = string_to_sign.encode("utf-8")
    hmac_code = hmac.new(
        secret_enc, string_to_sign_enc, digestmod=hashlib.sha256
    ).digest()
    sign = urllib.parse.quote_plus(base64.b64encode(hmac_code))
    url = f'https://oapi.dingtalk.com/robot/send?access_token={push_config.get("DD_BOT_TOKEN")}&timestamp={timestamp}&sign={sign}'
    headers = {"Content-Type": "application/json;charset=utf-8"}
    data = {"msgtype": "text", "text": {"content": f"{title}\n\n{content}"}}
    response = requests.post(
        url=url, data=json.dumps(data), headers=headers, timeout=15
    ).json()

    if not response["errcode"]:
        print("DingTalk bot push succeeded!")
    else:
        print("DingTalk bot push failed!")


def feishu_bot(title: str, content: str) -> None:
    """
    Push a message via a Feishu (Lark) bot.
    """
    if not push_config.get("FSKEY"):
        print("Feishu: FSKEY is not set!\nPush cancelled")
        return
    print("Feishu service starting")

    url = f'https://open.feishu.cn/open-apis/bot/v2/hook/{push_config.get("FSKEY")}'
    data = {"msg_type": "text", "content": {"text": f"{title}\n\n{content}"}}
    response = requests.post(url, data=json.dumps(data)).json()

    if response.get("StatusCode") == 0:
        print("Feishu push succeeded!")
    else:
        print("Feishu push failed! Error details:\n", response)


def go_cqhttp(title: str, content: str) -> None:
    """
    Push a message via go-cqhttp.
    """
    if not push_config.get("GOBOT_URL") or not push_config.get("GOBOT_QQ"):
        print("go-cqhttp: GOBOT_URL or GOBOT_QQ is not set!\nPush cancelled")
        return
    print("go-cqhttp service starting")

    url = f'{push_config.get("GOBOT_URL")}?access_token={push_config.get("GOBOT_TOKEN")}&{push_config.get("GOBOT_QQ")}&message=Title:{title}\nContent:{content}'
    response = requests.get(url).json()

    if response["status"] == "ok":
        print("go-cqhttp push succeeded!")
    else:
        print("go-cqhttp push failed!")


def iGot(title: str, content: str) -> None:
    """
    Push a message via iGot.
    """
    if not push_config.get("IGOT_PUSH_KEY"):
        print("iGot: IGOT_PUSH_KEY is not set!\nPush cancelled")
        return
    print("iGot service starting")

    url = f'https://push.hellyw.com/{push_config.get("IGOT_PUSH_KEY")}'
    data = {"title": title, "content": content}
    headers = {"Content-Type": "application/x-www-form-urlencoded"}
    response = requests.post(url, data=data, headers=headers).json()

    if response["ret"] == 0:
        print("iGot push succeeded!")
    else:
        print(f'iGot push failed! {response["errMsg"]}')


def serverJ(title: str, content: str) -> None:
    """
    Push a message via ServerChan (serverJ).
    """
    if not push_config.get("PUSH_KEY"):
        print("serverJ: PUSH_KEY is not set!\nPush cancelled")
        return
    print("serverJ service starting")

    data = {"text": title, "desp": content.replace("\n", "\n\n")}
    # str.index() raises ValueError when the substring is missing and never
    # returns -1, so use find() to detect the Turbo ("SCT") key format.
    if push_config.get("PUSH_KEY").find("SCT") != -1:
        url = f'https://sctapi.ftqq.com/{push_config.get("PUSH_KEY")}.send'
    else:
        url = f'https://sc.ftqq.com/{push_config.get("PUSH_KEY")}.send'
    response = requests.post(url, data=data).json()

    if response.get("errno") == 0 or response.get("code") == 0:
        print("serverJ push succeeded!")
    else:
        print(f'serverJ push failed! Error: {response["message"]}')


def pushplus_bot(title: str, content: str) -> None:
    """
    Push a message via push+.
    """
    if not push_config.get("PUSH_PLUS_TOKEN"):
        print("PUSHPLUS: PUSH_PLUS_TOKEN is not set!\nPush cancelled")
        return
    print("PUSHPLUS service starting")

    url = "http://www.pushplus.plus/send"
    data = {
        "token": push_config.get("PUSH_PLUS_TOKEN"),
        "title": title,
        "content": content,
        "topic": push_config.get("PUSH_PLUS_USER"),
    }
    body = json.dumps(data).encode(encoding="utf-8")
    headers = {"Content-Type": "application/json"}
    response = requests.post(url=url, data=body, headers=headers).json()

    if response["code"] == 200:
        print("PUSHPLUS push succeeded!")
    else:
        # Fall back to the legacy hxtrip endpoint
        url_old = "http://pushplus.hxtrip.com/send"
        response = requests.post(url=url_old, data=body, headers=headers).json()

        if response["code"] == 200:
            print("PUSHPLUS(hxtrip) push succeeded!")
        else:
            print("PUSHPLUS push failed!")


def qmsg_bot(title: str, content: str) -> None:
    """
    Push a message via Qmsg.
    """
    if not push_config.get("QMSG_KEY") or not push_config.get("QMSG_TYPE"):
        print("Qmsg: QMSG_KEY or QMSG_TYPE is not set!\nPush cancelled")
        return
    print("Qmsg service starting")

    url = f'https://qmsg.zendee.cn/{push_config.get("QMSG_TYPE")}/{push_config.get("QMSG_KEY")}'
    payload = {"msg": f'{title}\n\n{content.replace("----", "-")}'.encode("utf-8")}
    response = requests.post(url=url, params=payload).json()

    if response["code"] == 0:
        print("Qmsg push succeeded!")
    else:
        print(f'Qmsg push failed! {response["reason"]}')


def wecom_app(title: str, content: str) -> None:
    """
    Push a message via a WeCom (WeChat Work) application.
    """
    if not push_config.get("QYWX_AM"):
        print("QYWX_AM is not set!\nPush cancelled")
        return
    QYWX_AM_AY = re.split(",", push_config.get("QYWX_AM"))
    # QYWX_AM must contain 4 or 5 comma-separated fields
    if not 4 <= len(QYWX_AM_AY) <= 5:
        print("QYWX_AM is misconfigured!\nPush cancelled")
        return
    print("WeCom app service starting")

    corpid = QYWX_AM_AY[0]
    corpsecret = QYWX_AM_AY[1]
    touser = QYWX_AM_AY[2]
    agentid = QYWX_AM_AY[3]
    try:
        media_id = QYWX_AM_AY[4]
    except IndexError:
        media_id = ""
    wx = WeCom(corpid, corpsecret, agentid)
    # If media_id is not configured, send as plain text by default
    if not media_id:
        message = title + "\n\n" + content
        response = wx.send_text(message, touser)
    else:
        response = wx.send_mpnews(title, content, media_id, touser)

    if response == "ok":
        print("WeCom app push succeeded!")
    else:
        print("WeCom app push failed! Error details:\n", response)


class WeCom:
    def __init__(self, corpid, corpsecret, agentid):
        self.CORPID = corpid
        self.CORPSECRET = corpsecret
        self.AGENTID = agentid

    def get_access_token(self):
        url = "https://qyapi.weixin.qq.com/cgi-bin/gettoken"
        values = {
            "corpid": self.CORPID,
            "corpsecret": self.CORPSECRET,
        }
        req = requests.post(url, params=values)
        data = json.loads(req.text)
        return data["access_token"]

    def send_text(self, message, touser="@all"):
        send_url = (
            "https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token="
            + self.get_access_token()
        )
        send_values = {
            "touser": touser,
            "msgtype": "text",
            "agentid": self.AGENTID,
            "text": {"content": message},
            "safe": "0",
        }
        send_msges = bytes(json.dumps(send_values), "utf-8")
        response = requests.post(send_url, send_msges)
        response = response.json()
        return response["errmsg"]

    def send_mpnews(self, title, message, media_id, touser="@all"):
        send_url = (
            "https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token="
            + self.get_access_token()
        )
        send_values = {
            "touser": touser,
            "msgtype": "mpnews",
            "agentid": self.AGENTID,
            "mpnews": {
                "articles": [
                    {
                        "title": title,
                        "thumb_media_id": media_id,
                        "author": "Author",
                        "content_source_url": "",
                        "content": message.replace("\n", "<br/>"),
                        "digest": message,
                    }
                ]
            },
        }
        send_msges = bytes(json.dumps(send_values), "utf-8")
        response = requests.post(send_url, send_msges)
        response = response.json()
        return response["errmsg"]


def wecom_bot(title: str, content: str) -> None:
    """
    Push a message via a WeCom (WeChat Work) bot.
    """
    if not push_config.get("QYWX_KEY"):
        print("WeCom bot: QYWX_KEY is not set!\nPush cancelled")
        return
    print("WeCom bot service starting")

    url = f"https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key={push_config.get('QYWX_KEY')}"
    headers = {"Content-Type": "application/json;charset=utf-8"}
    data = {"msgtype": "text", "text": {"content": f"{title}\n\n{content}"}}
    response = requests.post(
        url=url, data=json.dumps(data), headers=headers, timeout=15
    ).json()

    if response["errcode"] == 0:
        print("WeCom bot push succeeded!")
    else:
        print("WeCom bot push failed!")


def telegram_bot(title: str, content: str) -> None:
    """
    Push a message via a Telegram bot.
    """
    if not push_config.get("TG_BOT_TOKEN") or not push_config.get("TG_USER_ID"):
        print("Telegram: bot_token or user_id is not set!\nPush cancelled")
        return
    print("Telegram service starting")

    if push_config.get("TG_API_HOST"):
        url = f"https://{push_config.get('TG_API_HOST')}/bot{push_config.get('TG_BOT_TOKEN')}/sendMessage"
    else:
        url = (
            f"https://api.telegram.org/bot{push_config.get('TG_BOT_TOKEN')}/sendMessage"
        )
    headers = {"Content-Type": "application/x-www-form-urlencoded"}
    payload = {
        "chat_id": str(push_config.get("TG_USER_ID")),
        "text": f"{title}\n\n{content}",
        "disable_web_page_preview": "true",
    }
    proxies = None
    if push_config.get("TG_PROXY_HOST") and push_config.get("TG_PROXY_PORT"):
        if push_config.get("TG_PROXY_AUTH") is not None and "@" not in push_config.get(
            "TG_PROXY_HOST"
        ):
            push_config["TG_PROXY_HOST"] = (
                push_config.get("TG_PROXY_AUTH")
                + "@"
                + push_config.get("TG_PROXY_HOST")
            )
        proxyStr = "http://{}:{}".format(
            push_config.get("TG_PROXY_HOST"), push_config.get("TG_PROXY_PORT")
        )
        proxies = {"http": proxyStr, "https": proxyStr}
    response = requests.post(
        url=url, headers=headers, params=payload, proxies=proxies
    ).json()

    if response["ok"]:
        print("Telegram push succeeded!")
    else:
        print("Telegram push failed!")


def one() -> str:
    """
    Fetch a single hitokoto (random quote).
    """
    url = "https://v1.hitokoto.cn/"
    res = requests.get(url).json()
    return res["hitokoto"] + " ----" + res["from"]


if push_config.get("BARK_PUSH"):
    notify_function.append(bark)
if push_config.get("CONSOLE"):
    notify_function.append(console)
if push_config.get("DD_BOT_TOKEN") and push_config.get("DD_BOT_SECRET"):
    notify_function.append(dingding_bot)
if push_config.get("FSKEY"):
    notify_function.append(feishu_bot)
if push_config.get("GOBOT_URL") and push_config.get("GOBOT_QQ"):
    notify_function.append(go_cqhttp)
if push_config.get("IGOT_PUSH_KEY"):
    notify_function.append(iGot)
if push_config.get("PUSH_KEY"):
    notify_function.append(serverJ)
if push_config.get("PUSH_PLUS_TOKEN"):
    notify_function.append(pushplus_bot)
if push_config.get("QMSG_KEY") and push_config.get("QMSG_TYPE"):
    notify_function.append(qmsg_bot)
if push_config.get("QYWX_AM"):
    notify_function.append(wecom_app)
if push_config.get("QYWX_KEY"):
    notify_function.append(wecom_bot)
if push_config.get("TG_BOT_TOKEN") and push_config.get("TG_USER_ID"):
    notify_function.append(telegram_bot)


def send(title: str, content: str) -> None:
    if not content:
        print(f"{title}: push content is empty!")
        return

    hitokoto = push_config.get("HITOKOTO")
    text = one() if hitokoto else ""
    content += "\n\n" + text

    ts = [
        threading.Thread(target=mode, args=(title, content), name=mode.__name__)
        for mode in notify_function
    ]
    [t.start() for t in ts]
    [t.join() for t in ts]


def main():
    send("title", "content")


if __name__ == "__main__":
    main()
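Every notifier in notify.py shares the same (title, content) signature and is registered in notify_function, which send() fans out to on worker threads. A minimal sketch of plugging a custom channel into that contract; the MY_WEBHOOK variable and its endpoint are hypothetical, not part of the original script:

import os

import requests

MY_WEBHOOK = os.getenv("MY_WEBHOOK", "")  # hypothetical config variable


def my_webhook(title: str, content: str) -> None:
    """Push a message to a hypothetical generic JSON webhook."""
    if not MY_WEBHOOK:
        print("my_webhook: MY_WEBHOOK is not set!\nPush cancelled")
        return
    requests.post(MY_WEBHOOK, json={"title": title, "content": content}, timeout=15)


# Register it exactly like the built-in channels; send() then picks it up:
# if MY_WEBHOOK:
#     notify_function.append(my_webhook)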
actions.py
# This file contains your custom actions which can be used to run
# custom Python code.
#
# See this guide on how to implement these actions:
# https://rasa.com/docs/rasa/core/actions/#custom-actions/

# This is a simple example for a custom action which utters "Hello World!"

import re
import io
import ast
import requests
import numpy as np
import pandas as pd
import random
import multiprocessing
import threading, queue
from decimal import Decimal
from ttictoc import tic, toc

from typing import Any, Text, Dict, List, Union, Optional
from rasa_sdk import Action, Tracker
from rasa_sdk import FormValidationAction
from rasa_sdk.events import SlotSet, FollowupAction
from rasa_sdk.types import DomainDict
from rasa_sdk.executor import CollectingDispatcher

import warnings
from statistics import mean
from os import path, getenv
from datetime import datetime
import matplotlib.pyplot as plt
from botocore.exceptions import ClientError
from boto3.exceptions import S3UploadFailedError
import boto3
from sqlalchemy import create_engine
import sqlalchemy.types as sql_types

DB_AWS_ACCESS_KEY_ID = getenv('DB_AWS_ACCESS_KEY_ID')
DB_AWS_SECRET_ACCESS_KEY = getenv('DB_AWS_SECRET_ACCESS_KEY')
DB_AWS_BUCKET = 'journeypic'


# ------------------------------------------------------------------
def connect_to_server(params_dict, logging_func=print, debug=False):
    connit = params_dict['connit_type'] + '://' + params_dict['connit_user'] + ':' \
             + params_dict['connit_pass'] + '@' \
             + params_dict['connit_host'] + ':' \
             + params_dict['connit_port'] + '/' \
             + params_dict['connit_db']
    if debug:
        logging_func(connit)

    sql_engine = create_engine(connit, echo=False)
    try:
        sql_engine.connect()
        logging_func("Connected Successfully")
    except Exception as e:
        logging_func("Error connecting to SQL server!\n\n%s\n" % str(e))
        raise
    return sql_engine


def read_table(sql_engine, sql_query, logging_func=print, debug=False):
    df = pd.read_sql(sql_query, sql_engine)
    if debug:
        # the original called a bare `search`; re.search is the intended function
        match1 = re.search('FROM (.*) ORDER', sql_query)
        match2 = re.search('FROM (.*) LIMIT', sql_query)
        table_name = "Data"
        if match1:
            table_name = match1.group(1)
        elif match2:
            table_name = match2.group(1)
        logging_func('\n%s %s:' % (table_name, str(df.shape)))
        logging_func(df.head().to_string())
    return df


# ------------------------------------------------------------------
def res_timer(res, tracker):
    timer_state = tracker.get_slot('timer_state') if tracker.get_slot('timer_state') else 'n/a'
    if timer_state == 'on':
        res += '\nElapsed time: %.2f sec' % toc()
    return res


# ------------------------------------------------------------------
def res_error(res, tracker, e):
    timer_state = tracker.get_slot('timer_state') if tracker.get_slot('timer_state') else 'n/a'
    if timer_state == 'on':
        res += '\nERROR: %s' % e
    return res


# ------------------------------------------------------------------
def simpleQuestionAnswer(tracker, entity, db_dict, user_intent=""):
    lut_df = db_dict['lut']
    custom_df = db_dict['nutrients_qna']
    feature = lut_df['Entity'][entity]
    try:
        if feature in custom_df.index:
            res = custom_df.loc[feature][user_intent]
        else:
            res = custom_df[[str(s) in feature for s in custom_df.index.tolist()]][user_intent][0]
        if 'slot#' in res:
            res_list = res.split(' ')
            for k, el in enumerate(res_list):
                if 'slot#' in el:
                    res_list[k] = str(tracker.get_slot(el.split('#')[1]))
            res = ' '.join(res_list)
        res_list = re.findall('\{.*?\}', res)
        for match in res_list:
            res = res.replace(match, str(eval(match[1:-1])))
    except Exception:
        res = "אין לי מושג, מצטער!"  # Hebrew: "I have no idea, sorry!"

    return res


def checkPrecentinres(title, x):
    precent_position = None
    listNumbers = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
    if 'אחוז' in title:  # Hebrew for "percent"
        precent_position = title.find('אחוז')
    if '%' in title:
        precent_position = title.find('%')
    if precent_position is not None:
        if title[precent_position - 2] == '0' and title[precent_position - 3] not in listNumbers:
            title = title[:title.find(x)]
            title += x
    return title


# ------------------------------------------------------------------
def upload_file_to_s3(local_file, s3_folder, s3_file,
                      aws_access_key_id, aws_secret_access_key, aws_bucket,
                      debug_en=False):
    """ upload a given file to a given location on Amazon-S3 """
    success = True
    HTTP_OK = 200

    # Connect to Amazon-S3 client:
    s3_client = boto3.client('s3',
                             aws_access_key_id=aws_access_key_id,
                             aws_secret_access_key=aws_secret_access_key)

    # Make a new directory on S3 (if it does not already exist):
    if s3_folder + '/' in [x['Key'] for x in s3_client.list_objects(Bucket=aws_bucket)['Contents']]:
        pass
    elif not debug_en:
        res = s3_client.put_object(Bucket=aws_bucket, Key='%s/' % s3_folder)
        success = res['ResponseMetadata']['HTTPStatusCode'] == HTTP_OK
        if not success:
            return success, ""

    # Upload local_file to S3:
    x = 3
    if not debug_en:
        try:
            if path.exists(local_file):
                s3_client.upload_file(local_file, aws_bucket, path.join(s3_folder, s3_file))
                s3_client.put_object_acl(ACL='public-read',
                                         Bucket=aws_bucket,
                                         Key=path.join(s3_folder, s3_file))
        except (ClientError, S3UploadFailedError) as e:
            # the original assigned `success = False, ""` (a tuple) by mistake
            success = False
    return success, "https://%s.s3.eu-central-1.amazonaws.com/%s/%s" % (aws_bucket, s3_folder, s3_file)


# ------------------------------------------------------------------
def donut_generator(names, sizes, radius=0.7, textstr_title='', colors=None,
                    figname="image.png"):
    CARBS_GRAMS_CALOIRES = 4
    PROTEIN_GRAMS_CALOIRES = 4
    FAT_GRAMS_CALOIRES = 9
    if colors is None:
        colors = []
    my_circle = plt.Circle((0, 0), radius, color='white')

    fig, ax = plt.subplots()
    labels = [':' + k1 + '\nםרג ' + str(round(k2, 2)) for k1, k2 in zip(names, sizes)]
    if colors:
        ax.pie(sizes, colors=colors)
    else:
        ax.pie(sizes)
    plt.legend(bbox_to_anchor=(1.0, 0.88), fontsize=18, labels=labels)
    p = plt.gcf()
    p.gca().add_artist(my_circle)
    if textstr_title:
        ax.text(0.34, 1.05, textstr_title, transform=ax.transAxes,
                weight='bold', fontsize=30, verticalalignment='center_baseline')
    sizes[0] *= PROTEIN_GRAMS_CALOIRES
    sizes[1] *= CARBS_GRAMS_CALOIRES
    sizes[2] *= FAT_GRAMS_CALOIRES
    sum2 = round(sum(sizes), 2)
    textstr_center1 = str(sum2)
    textstr_center2 = 'קלוריות'[::-1]  # Hebrew for "calories", reversed for RTL rendering
    ax.text(0.39, 0.56, textstr_center1, transform=ax.transAxes,
            weight='bold', fontsize=24, verticalalignment='center_baseline')
    ax.text(0.37, 0.44, textstr_center2, transform=ax.transAxes,
            fontsize=18, verticalalignment='center_baseline')
    if figname:
        fig.patch.set_facecolor('white')
        fig.savefig(figname, bbox_inches='tight', facecolor='white')
    else:
        plt.show()


# ------------------------------------------------------------------
def donut_generator_wrapper(title, data):
    names = [x[::-1] for x in list(data.keys())]
    sizes = list(data.values())
    colors = ['darkorange', 'lightgreen', 'blue']
    textstr_title = title[::-1]
    figname = "donut_image1.png"

    donut_generator(names=names, sizes=sizes, radius=0.7,
                    textstr_title=textstr_title, colors=colors,
                    figname=figname)
    return figname


# ------------------------------------------------------------------
def iniliatize_Diagram(title, data):
    unique_filename = lambda fname: "%s_%s%s" % (path.splitext(fname)[0],
                                                 datetime.now().strftime("%m%d%Y_%H%M%S"),
                                                 path.splitext(fname)[1])
    figname = donut_generator_wrapper(title, data)
    res, figure_url = upload_file_to_s3(local_file=figname,
                                        s3_folder="auto_generated",
                                        s3_file=unique_filename(figname),
                                        aws_access_key_id=DB_AWS_ACCESS_KEY_ID,
                                        aws_secret_access_key=DB_AWS_SECRET_ACCESS_KEY,
                                        aws_bucket=DB_AWS_BUCKET)
    return figure_url


# ------------------------------------------------------------------
def activate_load_db(name, table, dic):
    dic[name] = load_db(table)


def get_tables(bits):
    table_dict = {'0x1': 'tzameret',
                  '0x2': 'lut',
                  '0x4': 'nutrients_qna',
                  '0x8': 'food_qna',
                  '0x10': 'common_food',
                  '0x20': 'food_ranges',
                  '0x40': 'micro_nutrients',
                  '0x80': 'food_units',
                  '0x100': 'bloodtest_vals',
                  '0x200': 'food_units_aliases',
                  '0x400': 'food_units_features',
                  '0x800': 'subs_tags_alias',
                  '0x1000': 'Weights_and_measures'}
    scale = 16
    bits_binary = bin(int(bits, scale))[2:].zfill(len(bits) * 4)
    # Decompose the bitmap into its individual power-of-two components
    numbers_zero = ''
    numbers = []
    for digit in reversed(bits_binary):
        if digit != '1':
            numbers_zero += digit
        else:
            numbers.append('1' + numbers_zero)
            numbers_zero += '0'
    for i, value in enumerate(numbers):
        decimal_representation = int(value, 2)
        temp = hex(decimal_representation)
        temp = int(temp, 16)
        numbers[i] = hex(temp)
    manager = multiprocessing.Manager()
    db_dict = manager.dict()
    jobs = []
    for value in numbers:
        # Load each table in its own process, keyed by table name
        p = multiprocessing.Process(target=activate_load_db,
                                    args=(table_dict[value], value, db_dict))
        jobs.append(p)
        p.start()
    for proc in jobs:
        proc.join()
    return db_dict


def load_db(db_bitmap, read_databse_en=True):
    # available_tables_df = read_database
    sql_params = {'connit_type': 'postgresql',
                  'connit_user': 'newtrds',
                  'connit_pass': 'q1w2e3r4!',
                  'connit_host': 'newt-tzameret-db.c1ub7aqk5fah.eu-central-1.rds.amazonaws.com',
                  'connit_port': '5432',
                  'connit_db': 'postgres',
                  'max_records': 1000}

    sql_engine = connect_to_server(sql_params)

    if db_bitmap == '0x1':
        tzameret = read_table(sql_engine, "SELECT * FROM tzameret_entity")
        return tzameret

    # "Zameret_hebrew_features" - entities aliases
    if db_bitmap == '0x2':
        lut = read_table(sql_engine, "SELECT * FROM rasa_lut_entity")
        lut = lut.set_index('Entity Alias')
        return lut

    # "Zameret_hebrew_features" - nutrients_questions
    if db_bitmap == '0x4':
        nutrients_qna = read_table(sql_engine, "SELECT * FROM rasa_nutrients_qna_entity")
        nutrients_qna = nutrients_qna.set_index('Entity')
        return nutrients_qna

    # "Zameret_hebrew_features" - Food questions
    if db_bitmap == '0x8':
        food_qna = read_table(sql_engine, "SELECT * FROM rasa_food_qna_entity")
        food_qna = food_qna.set_index('nutrition_density')
        return food_qna

    # "Zameret_hebrew_features" - List of common foods
    if db_bitmap == '0x10':
        common_food = read_table(sql_engine, "SELECT * FROM common_food_entity")
        common_food = common_food.set_index('common_name')
        return common_food

    # "Newt Machine Readable" - FoodItemRanges
    if db_bitmap == '0x20':
        food_ranges = read_table(sql_engine, "SELECT * FROM food_ranges_entity")
        food_ranges = food_ranges.set_index('Nutrient')
        return food_ranges

    # "Newt Machine Readable" - MicroNutrients
    if db_bitmap == '0x40':
        micro_nutrients = read_table(sql_engine, "SELECT * FROM micronutrients_entity")
        return micro_nutrients

    # "Newt Machine Readable" - MicroNutrients
    if db_bitmap == '0x80':
        food_units = read_table(sql_engine, "SELECT * FROM food_units_entity")
        return food_units

    # "Newt Machine Readable" - BloodTestValues
    if db_bitmap == '0x100':
        bloodtest_vals = read_table(sql_engine, "SELECT * FROM bloodtest_vals_entity")
        return bloodtest_vals

    # "Zameret_hebrew_features" - Weight aliases
    if db_bitmap == '0x200':
        food_units_aliases = read_table(sql_engine, "SELECT * FROM food_units_aliases_entity")
        return food_units_aliases

    # "Zameret_hebrew_features" - For Noa
    if db_bitmap == '0x400':
        food_units_features_df = read_table(sql_engine, "SELECT * FROM tzameret_newt_entity")
        food_units_features = food_units_features_df.dropna(axis=0, how='all')
        food_units_features = food_units_features.rename({'Primary_SN': 'smlmitzrach'}, axis=1)
        return food_units_features

    # "Zameret_hebrew_features" - subs_tags_alias
    if db_bitmap == '0x800':
        subs_tags_alias = read_table(sql_engine, "SELECT * FROM subs_tags_aliases_entity")
        subs_tags_alias = subs_tags_alias.set_index('Entity Alias').fillna(0)
        return subs_tags_alias

    if db_bitmap == '0x1000':
        Weights_and_measures = read_table(sql_engine, "SELECT * FROM weights_measures")
        return Weights_and_measures


def load_db_googleSheet(db_bitmap):
    db_dict = {}

    # "Zameret food list 22_JAN_2020"
    if (db_bitmap & 0x1) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=84892416"
        s = requests.get(url).content
        db_dict['tzameret'] = pd.read_csv(io.StringIO(s.decode('utf-8'))).fillna(0)

    # "Zameret_hebrew_features" - entities aliases
    if (db_bitmap & 0x2) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=1805881936"
        s = requests.get(url).content
        db_dict['lut'] = pd.read_csv(io.StringIO(s.decode('utf-8')),
                                     header=0,
                                     index_col=["Entity Alias"],
                                     usecols=["Entity Alias", "Entity", "Units",
                                              "Entity name", "RDA name",
                                              "action_simple_question",
                                              "action_nutrition_howmanyxiny_x",
                                              "action_nutrition_howmanyxiny_y",
                                              "action_nutrition_is_food_healthy",
                                              "action_nutrition_is_food_recommended",
                                              "action_nutrition_what_is_healthier_x",
                                              "action_nutrition_what_is_healthier_y",
                                              "action_nutrition_get_rda",
                                              "action_nutrition_bloodtest_generic",
                                              "action_nutrition_bloodtest_value",
                                              "action_nutrition_food_substitute",
                                              "action_nutrition_compare_foods",
                                              "action_nutrition_howmanyxyinz"]).fillna(0)

    # "Zameret_hebrew_features" - nutrients_questions
    if (db_bitmap & 0x4) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=1706335378"
        s = requests.get(url).content
        db_dict['nutrients_qna'] = pd.read_csv(io.StringIO(s.decode('utf-8')),
                                               header=0,
                                               index_col=["Entity"]).fillna(0)

    # "Zameret_hebrew_features" - Food questions
    if (db_bitmap & 0x8) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=1099284657"
        s = requests.get(url).content
        db_dict['food_qna'] = pd.read_csv(io.StringIO(s.decode('utf-8')),
                                          header=0,
                                          index_col=["nutrition_density"],
                                          usecols=["nutrition_density", "energy_density",
                                                   "description_density"]).fillna(0)

    # "Zameret_hebrew_features" - List of common foods
    if (db_bitmap & 0x10) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=495295419"
        s = requests.get(url).content
        db_dict['common_food'] = pd.read_csv(io.StringIO(s.decode('utf-8')),
                                             header=0,
                                             index_col=["common_name"],
                                             usecols=["common_name", "shmmitzrach",
                                                      "smlmitzrach"]).fillna(0)

    # "Newt Machine Readable" - FoodItemRanges
    if (db_bitmap & 0x20) > 0:
        url = "https://docs.google.com/spreadsheets/d/1IPTflCe6shaP-FBAuXWSFCX5hSuAo7bMGczNMTSTYY0/export?format=csv&gid=885087351"
        s = requests.get(url).content
        db_dict['food_ranges'] = pd.read_csv(io.StringIO(s.decode('utf-8')),
                                             header=0,
                                             index_col=["Nutrient"],
                                             usecols=["Nutrient",
                                                      "Medium - threshold per 100gr",
                                                      "High - threshold per 100gr",
                                                      "good_or_bad", "tzameret_name",
                                                      "hebrew_name"]).fillna(0)

    # "Newt Machine Readable" - MicroNutrients
    if (db_bitmap & 0x40) > 0:
        url = "https://docs.google.com/spreadsheets/d/1IPTflCe6shaP-FBAuXWSFCX5hSuAo7bMGczNMTSTYY0/export?format=csv&gid=222801095"
        s = requests.get(url).content
        micro_nutrients_df = pd.read_csv(io.StringIO(s.decode('utf-8')), header=0).fillna(0)
        db_dict['micro_nutrients'] = micro_nutrients_df

    # "Newt Machine Readable" - MicroNutrients
    if (db_bitmap & 0x80) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=1373096469"
        s = requests.get(url).content
        food_units_df = pd.read_csv(io.StringIO(s.decode('utf-8')), header=0).fillna(0)
        db_dict['food_units'] = food_units_df

    # "Newt Machine Readable" - BloodTestValues
    if (db_bitmap & 0x100) > 0:
        url = "https://docs.google.com/spreadsheets/d/1IPTflCe6shaP-FBAuXWSFCX5hSuAo7bMGczNMTSTYY0/export?format=csv&gid=1011022304"
        s = requests.get(url).content
        bloodtest_df = pd.read_csv(io.StringIO(s.decode('utf-8')),
                                   header=0, nrows=19, usecols=range(11)).fillna(0)
        db_dict['bloodtest_vals'] = bloodtest_df

    # "Zameret_hebrew_features" - Weight aliases
    if (db_bitmap & 0x200) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=623521836"
        s = requests.get(url).content
        food_units_aliases_df = pd.read_csv(io.StringIO(s.decode('utf-8')), header=0)
        db_dict['food_units_aliases'] = food_units_aliases_df

    # "Zameret_hebrew_features" - For Noa
    if (db_bitmap & 0x400) > 0:
        url = "https://docs.google.com/spreadsheets/d/19rYDpki0jgGeNlKLPnINiDGye8QEfQ4IEEWSkLFo83Y/export?format=csv&gid=2106834268"
        s = requests.get(url).content
        food_units_features_df = pd.read_csv(io.StringIO(s.decode('utf-8')), header=1)
        db_dict['food_units_features'] = food_units_features_df.dropna(axis=0, how='all')
        db_dict['food_units_features'] = db_dict['food_units_features'].rename(
            {'Primary_SN': 'smlmitzrach'}, axis=1)

    # "Zameret_hebrew_features" - subs_tags_alias
    if (db_bitmap & 0x800) > 0:
        url = "https://docs.google.com/spreadsheets/d/1VvXmu5l58XwcDDtqz0bkHIl_dC92x3eeVdZo2uni794/export?format=csv&gid=458428667"
        s = requests.get(url).content
        db_dict['subs_tags_alias'] = pd.read_csv(io.StringIO(s.decode('utf-8')),
                                                 header=0,
                                                 usecols=["Entity Alias", "Entity",
                                                          "Show_stopers"]).set_index('Entity Alias')

    if (db_bitmap & 0x1000) > 0:
        url = "https://docs.google.com/spreadsheets/d/19rYDpki0jgGeNlKLPnINiDGye8QEfQ4IEEWSkLFo83Y/export?format=csv&gid=428717261"
        s = requests.get(url).content
        db_dict['Weights_and_measures'] = pd.read_csv(io.StringIO(s.decode('utf-8')), header=0)

    return db_dict


# ------------------------------------------------------------------
def import_sheets(debug=False):
    '''Import the df noa and tzameret food group tabs from the suggested meal
    planning sheet as a DataFrame.
Import weights and measures, and tzameret food list from Tzameret DB as a DataFrame''' # df = load_db_googleSheet(0x481) df = get_tables('0x481') sheet_id = '19rYDpki0jgGeNlKLPnINiDGye8QEfQ4IEEWSkLFo83Y' gid_2 = '428717261' df_tzameret_food_group = pd.read_csv( f"https://docs.google.com/spreadsheets/d/{sheet_id}/export?format=csv&gid={gid_2}") df_nutrition = df['tzameret'] df_nutrition.fillna(0, inplace=True) df_nutrition.rename(columns={'carbohydrates': 'carbs'}, inplace=True) df_weights = df['food_units'] df_weights.head() df_noa_pre_1 = df['food_units_features'] # df_tzameret_food_group = ['Weights_and_measures'] df_noa = df['food_units_features'] header = list(df_noa_pre_1.columns.values) df_noa.loc[-1] = header # adding a row df_noa.index = df_noa.index + 1 # shifting index df_noa = df_noa.sort_index() # sorting by index df_noa.head() df_noa.columns = df_noa.columns.str.lower() df_noa = df_noa.iloc[1:] # df_noa doesn not have the first row with the numbers to make it easier to filter data df_noa['lactose_free'] = df_noa['lactose_free'].replace({'Low Lactose': 'Yes', 'Lactose Free': 'Yes'}) df_noa['food_category'] = df_noa['food_category'].replace({'N/A': 'Savoury_Snacks'}) df_noa.dropna(subset=["food_name"], inplace=True) # dropping all meals that don't have a meal name to get complete list of actual meals df_noa = df_noa.rename(columns={'smlmitzrach': 'primary_sn'}) df_noa['sn_1'] = df_noa['primary_sn'].astype(str).str[:1] df_noa['sn_2'] = df_noa['primary_sn'].astype(str).str[1:2] return df_noa, df_tzameret_food_group, df_weights, df_nutrition # ------------------------------------------------------------------ def get_rda(name, tracker, nutrient_temp="", intent_upper=False): actions_list = ['action_nutrition_howmanyxyinz', 'action_nutrition_compare_foods'] # db_dict = load_db_googleSheet(0x46) db_dict = get_tables('0x46') lut_df = db_dict['lut'] micro_nutrients_df = db_dict['micro_nutrients'] if intent_upper: micro_nutrients_df = micro_nutrients_df[micro_nutrients_df['Type'] == "Upper Limit"] else: micro_nutrients_df = micro_nutrients_df[micro_nutrients_df['Type'] == "RDA"] status = "match" if not (tracker.get_slot('gender') and tracker.get_slot('age') and tracker.get_slot( 'weight') and tracker.get_slot( 'height')): status = "default" nutrient = None if name in actions_list: nutrient = nutrient_temp else: x = tracker.get_slot('x') if tracker.get_slot('x') else None if x is not None and x is not "": nutrient = x else: for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[name].values: nutrient = ent['value'] break if nutrient is None: nutrient = nutrient_temp try: feature = lut_df['Entity'][nutrient] feature_rda = lut_df['RDA name'][lut_df['Entity name'] == feature][0] gender = "Male" if tracker.get_slot('gender') == "זכר": gender = "Male" elif tracker.get_slot('gender') == "נקבה": gender = "Female" user_vars = {} user_vars['age'] = tracker.get_slot('age') if tracker.get_slot('age') else "40" user_vars['weight'] = tracker.get_slot('weight') if tracker.get_slot('weight') else "80" user_vars['height'] = tracker.get_slot('height') if tracker.get_slot('height') else "180" rda_row = micro_nutrients_df[(micro_nutrients_df['Micronutrient'] == feature_rda) & \ ((micro_nutrients_df['Gender'] == "ANY") | ( micro_nutrients_df['Gender'] == gender)) & \ ((micro_nutrients_df['Pregnancy'] == "ANY") | ( micro_nutrients_df['Pregnancy'] == "No")) & \ ((micro_nutrients_df['Lactating'] == "ANY") | ( micro_nutrients_df['Lactating'] == "No")) & \ ((micro_nutrients_df['Age Min'] == 
"ANY") | ( micro_nutrients_df['Age Min'].astype(float) <= int( user_vars['age']))) & \ ((micro_nutrients_df['Age Max'] == "ANY") | ( micro_nutrients_df['Age Max'].astype(float) > int(user_vars['age'])))] rda_text = str(rda_row['Free Text'].values[0]) rda_value = str(rda_row['Value'].values[0]) rda_units = rda_row['Units'].values[0] rda_Image = rda_row['Image'].values[0] if 'slot#' in rda_value: rda_value_list = rda_value.split(' ') for k, el in enumerate(rda_value_list): if 'slot#' in el and el.split('#')[1] in user_vars: rda_value_list[k] = user_vars[el.split('#')[1]] strs = ' '.join([str(elem) for elem in rda_value_list]) # rda_value = eval(' '.join(rda_value_list)) rda_value = eval(str(strs)) rda_value = float(rda_value) if 'slot#' in rda_text: rda_text_list = rda_text.split(' ') for k, el in enumerate(rda_text_list): if 'slot#' in el: rda_text_list[k] = tracker.get_slot(el.split('#')[1]) rda_text = ' '.join(rda_text_list) rda_text_list = re.findall('\{.*?\}', rda_text) for match in rda_text_list: rda_text = rda_text.replace(match, str(eval(match[1:-1]))) if rda_text == "0": rda_text = "" return rda_value, rda_units, rda_text, status, nutrient, rda_Image except: return -1, -1, "", "missmatch", nutrient, "" # ------------------------------------------------------------------ def get_personal_str(rda_status, tracker): age = tracker.get_slot('age') if tracker.get_slot('age') and rda_status == "match" else '40' gender = tracker.get_slot('gender') if tracker.get_slot('gender') and rda_status == "match" else 'זכר' weight = tracker.get_slot('weight') if tracker.get_slot('weight') and rda_status == "match" else '80' height = tracker.get_slot('height') if tracker.get_slot('height') and rda_status == "match" else '180' if rda_status == "default": personal_str = "עבור %s בגיל %s במשקל %s ובגובה %s" % (gender, age, weight, height) else: personal_str = "עבורך (%s בגיל %s במשקל %s ובגובה %s)" % (gender, age, weight, height) return personal_str # ------------------------------------------------------------------ def get_food_nutrition_density(food, food_ranges_db): # Nutrition Density is defined in Tzameret: density_normalized = float(food["Nutrition density normalized"]) # Thresholds are defined in Machine-Readable: density = food_ranges_db[food_ranges_db.index == "Nutrition density"] density_med = float(density["Medium - threshold per 100gr"]) density_high = float(density["High - threshold per 100gr"]) # Binning: res = "high" if density_normalized < density_med: res = "low" elif density_normalized < density_high: res = "med" return density, res # ------------------------------------------------------------------ def get_food_energy_density(food, food_ranges_db): # Energy Density is defined in Tzameret: density_normalized = float(food["Energy density"]) # Thresholds are defined in Machine-Readable: density = food_ranges_db[food_ranges_db.index == "Energy density"] density_med = float(density["Medium - threshold per 100gr"]) density_high = float(density["High - threshold per 100gr"]) # Binning: res = "high" if density_normalized < density_med: res = "low" elif density_normalized < density_high: res = "med" return density, res # ------------------------------------------------------------------ def getcolums_FromDataFrame(db, colum): res = '' index_temp = 1 for index, row in db.iterrows(): res = res + str(index_temp) + '. 
' + row[colum] + '\n'
        index_temp += 1
    return res


def get_entity_filters(entity_fromrasa, negative_words_list, lut):
    entities_temp = {}
    entity = re.sub('וו|-|מנה|ארוחה|גם|הוא|שהוא|שהיא|[,?/123456789]|והיא|והוא', '', entity_fromrasa)
    entity = entity.replace('  ', ' ')
    entities = entity.split(' ')
    for index, ent in enumerate(entities):
        filter_type_word = False
        ent = ent.strip()
        if ent[0] == 'ו':
            ent = ent[1:]
            entities_temp[ent] = fliter_type(ent, lut, filter_type_word)
        if ent == '' or ent == ' ' or ent in negative_words_list or len(ent) <= 2:
            continue
        if index == 0 and ent in negative_words_list:
            continue
        if index == 0:
            entities_temp[ent] = fliter_type(ent, lut, filter_type_word)
        else:
            # a preceding negation word ("ללא", "בלי", ...) flips the filter
            if entities[index - 1] in negative_words_list:
                filter_type_word = True
            entities_temp[ent] = fliter_type(ent, lut, filter_type_word)
    return entities_temp


def fliter_type(entity, lut, type_filter):
    # get the entity alias for the Zameret
    if 'פלאו' in entity or 'פליאו' in entity:
        entity = "Paleo"
    elif 'טבעוני' in entity or 'טבעונית' in entity or 'טבעוניים' in entity or 'טבעונים' in entity:
        entity = "Vegan"
    elif 'צמחוני' in entity or 'צמחוניים' in entity or 'צמחונית' in entity or 'צמחונים' in entity:
        entity = "Vegetarian"
    else:
        entity = lut[lut.index == entity]['Entity'].iloc[0]
    if entity == 'Dairy' and type_filter is False:
        return entity, 'No'
    if entity == 'Fish_Free' and type_filter is False:
        return entity, 'No'
    elif entity == 'Fish_Free':
        return entity, 'Yes'
    return entity, 'Yes'


def how_many_x_in_y_core(x, y, food_units, name, tracker):
    # db_dict = load_db_googleSheet(0x293)
    db_dict = get_tables('0x293')
    y_common = y
    if y in db_dict['common_food'].index:
        y_common = db_dict['common_food'][db_dict['common_food'].index == y]['shmmitzrach'][0]
    else:
        y_food = ' '.join(y.split(' ')[1:])
        food_units = db_dict['food_units_aliases'][db_dict['food_units_aliases']['Unit Alias'] == y.split(' ')[0]][
            'Zameret unit']
        if food_units.empty:
            food_units = y.split(' ')[0]
        else:
            food_units = food_units.values[0]
        if y_food in db_dict['common_food'].index:
            y_common = db_dict['common_food'][db_dict['common_food'].index == y_food]['shmmitzrach'][0]
        else:
            y_common = y_food
    food = db_dict['tzameret'][db_dict['tzameret']['shmmitzrach'].str.contains(y_common)].iloc[0, :]
    feature = db_dict['lut'][db_dict['lut'].index == x]["Entity"][0]
    units = db_dict['lut'][db_dict['lut'].index == x]["Units"][0]
    food_units_row = pd.Series()
    if food_units:
        food_units_row = db_dict['food_units'][(db_dict['food_units']['smlmitzrach'] == food['smlmitzrach']) &
                                               (db_dict['food_units']['shmmida'] == food_units)]
    is_food_units_match = not food_units_row.empty or food_units == "100 גרם"
    food_units_factor = 1.0
    if not food_units_row.empty:
        food_units_factor = food_units_row['mishkal'].values[0] / 100
    val = food[feature] * food_units_factor
    if units == 0:
        res = "ב-%s של %s יש %.2f %s" % (food_units, food['shmmitzrach'], float(val), x)
    else:
        res = ""
        if not is_food_units_match:
            res = "לא הצלחתי למצוא נתונים במאגר על היחידה %s עליה שאלת\n" % food_units
            res += "היחידות הבאות קיימות במאגר, עבור %s:\n" % food['shmmitzrach']
            res += ', '.join(db_dict['food_units'][db_dict['food_units']['smlmitzrach'] == food['smlmitzrach']][
                                 'shmmida'].to_list())
            res += "\n"
            food_units = "100 גרם"
        res += "ב-%s של %s יש %.2f %s %s" % (food_units, food['shmmitzrach'], float(val), units, x)
    rda_val, rda_units, rda_text, rda_status, nutrient, x_1 = get_rda(name, tracker, x)
    if rda_val > 0 and units not in ['יחב"ל']:
        rda = 100 * float(val) / rda_val
        res += "\n"
        res += "שהם כ-%d אחוז מהקצובה היומית המומלצת %s" % (int(rda), get_personal_str(rda_status, tracker))
    if rda_text and rda_text != '0':
        res += '\n' + rda_text
    return val, res


def reverse_number(n):
    rev = 0
    while n > 0:
        a = n % 10
        rev = rev * 10 + a
        n = n // 10
    return rev


# ------------------------------------------------------------------
#  ____        _ _     _   __  __            _
# | __ ) _   _(_) | __| | |  \/  | ___  __ _| |
# |  _ \| | | | | |/ _` | | |\/| |/ _ \/ _` | |
# | |_) | |_| | | | (_| | | |  | |  __/ (_| | |
# |____/ \__,_|_|_|\__,_| |_|  |_|\___|\__,_|_|


def get_coupling(meals_bank, coupling, calories_budget, can_grams, amount, df, item_type):
    # the coupling field is parsed as a float NaN when no coupling is defined
    if isinstance(coupling, float):
        return 0, False, 0, 0, 0, 0, 0, ''
    if 'NaN' in coupling or 'nan' in coupling or coupling == '':
        return 0, False, 0, 0, 0, 0, 0, ''
    coupling = str(coupling)
    couling_list = coupling.split(',')
    if couling_list == []:
        return 0, False, 0, 0, 0, 0, 0, ''
    # pick a random candidate from the coupling list, skipping a trailing
    # newline/carriage-return fragment if one exists
    end = len(couling_list) - 1
    if '\n' in couling_list[-1] or '\r' in couling_list[-1]:
        end = len(couling_list) - 2
    if len(couling_list) == 1:
        coupling_foods = meals_bank[meals_bank['#'] == int(couling_list[0])]
    else:
        couling_number = random.randint(0, end)
        coupling_foods = meals_bank[meals_bank['#'] == int(couling_list[couling_number])]
    serial_sn = coupling_foods['primary_sn'].values
    food_name = coupling_foods['food_name'].values
    if len(serial_sn) <= 0 or len(food_name) <= 0:
        return 0, False, 0, 0, 0, 0, 0, ''
    coupling_foods_serial = int(serial_sn[0])
    # try every mida (unit) of the coupling food until one fits the remaining budget
    candidate_units = candidate_units_amounts(df, coupling_foods_serial, item_type)
    candidate_grams = candidate_units[0]
    for can_grams in candidate_grams:
        calories_couple, weight, grams, x, y = get_item_property(coupling_foods_serial, can_grams, amount)
        if calories_budget < 0:
            calories_budget = calories_budget * -1
        if calories_couple <= calories_budget:
            return calories_couple, True, coupling_foods_serial, weight, grams, x, y, food_name[0]
    return 0, False, 0, 0, 0, 0, 0, ''


def checkDoublePattern(sentence, pattern):
    # if the pattern appears twice, cut the sentence right after its first occurrence
    if sentence.count(pattern) == 2:
        return sentence[:sentence.find(pattern) + len(pattern)]
    return sentence


def update_budgets(daily_budget, meals_num, snacks_num, weights):
    '''Takes total budget, number of meals and snacks, and weights as parameters.
Returns budget for each category for every meal''' # change 0.3 to a user params budgets = {} div = (meals_num + inputs.get( 'budget_var') * snacks_num) # Is this supposed to be budget_var(0.3) times snacks num or budget_var times meals_num if div > 0: budgets['meal'] = round(daily_budget / div, 1) budgets['snack'] = round(inputs.get('budget_var') * daily_budget / div, 1) budgets['Carbs'] = round(weights[0] * budgets['meal'], 1) budgets['Protein'] = round(weights[1] * budgets['meal'], 1) budgets['Vegetables'] = round(weights[2] * budgets['meal'], 1) budgets['Fruits'] = round(weights[3] * budgets['snack'], 1) budgets['Fat'] = round(weights[4] * budgets['snack'], 1) budgets['Fat_meal'] = round(weights[4] * budgets['meal'], 1) budgets['Savoury_Snacks'] = round(weights[5] * budgets['snack'], 1) budgets['Sweets'] = round(weights[6] * budgets['snack'], 1) budgets['all'] = round(daily_budget, 1) return budgets def filter_meals_by_features(user_params, df_feature): '''Takes user inputs and a Dataframe as parameters and returns a DataFrame filtered by the user inputs''' for k, v in user_params.items(): if (v == 'Yes') and (debug['debug_en']): df_feature = df_feature.loc[df_feature[k] == v] return df_feature def filter_meals_by_meal_type(df, meal_type): '''Filters the DataFrame by the meal type to be used in making a scoreboard for each meal like breakfast, lunch etc.''' if debug: return df.loc[(df['il_' + meal_type] == 'Yes')] def candidate_units_amounts(item, sn, items_type): '''Returns the different options for mida amount and servings for each amount''' sn_1 = int(item['sn_1'].values[0]) df_max_meal = df_tzameret_food_group.loc[df_tzameret_food_group['ספרה ראשונה בקוד'] == sn_1] units_intersection = [] amounts_intersection = [] if items_type != 'snack': df_max_meal = df_tzameret_food_group.loc[df_tzameret_food_group['ספרה ראשונה בקוד'] == sn_1] max_amount_meal = df_max_meal['mida_maxAmount_meal'].values[0].replace(' ', '').split(',') min_amount_meal = df_max_meal['mida_minAmount_meal'].values[0].replace(' ', '').split(',') df_weights_list = df_weights[df_weights['smlmitzrach'] == sn] weights_list = df_weights_list['mida'].tolist() min_max_amount_meal_units = [int(value.split('_')[0]) for value in max_amount_meal] min_max_amount_meal_amounts = [list(range(int(min_val.split('_')[1]), int(max_val.split('_')[1]) + 1)) for min_val, max_val in zip(min_amount_meal, max_amount_meal)] for k, value in enumerate(min_max_amount_meal_units): if value in weights_list: units_intersection.append(value) amounts_intersection.append(min_max_amount_meal_amounts[k]) else: max_amount_snack = df_max_meal['mida_maxAmount_snack'].values[0].replace(' ', '').split(',') df_weights_list = df_weights[df_weights['smlmitzrach'] == sn] weights_list = df_weights_list['mida'].tolist() max_amount_snack_units = [int(value.split('_')[0]) for value in max_amount_snack] max_amount_snack_amounts = [list(range(1, int(value.split('_')[1]) + 1)) for value in max_amount_snack] for k, value in enumerate(max_amount_snack_units): if value in weights_list: units_intersection.append(value) amounts_intersection.append(max_amount_snack_amounts[k]) return units_intersection, amounts_intersection def get_item_property(sn, grams, serving): '''Returns the total item calories for each item''' # if the mida is 700 then multiply by 100, if any other number divide by 100 weights = df_weights[(df_weights['smlmitzrach'] == sn) & (df_weights['mida'] == grams)] mishkal = weights.iloc[0]['mishkal'] if mishkal == 700: mishkal = mishkal * 100 else: mishkal = 
mishkal / 100 attribute = df_nutrition.loc[df_nutrition['smlmitzrach'] == str(int(sn))] attribute_total = attribute.iloc[0]['food_energy'] total = attribute_total * mishkal * serving return total, weights.iloc[0]['shmmida'], weights.iloc[0]['mishkal'], weights, serving def update_calorie_budgets(candidate_calories, item_type, bud): '''Updates the calories budget based on how many calories were already used''' bud[item_type] = bud[item_type] - candidate_calories return bud def check_item_type_if_exist_already(meals, df_noa): if meals == [] or len(meals) == 1: return True for item in meals: sn = item['primary_sn'].values[0] item_temp = df_noa[df_noa['primary_sn'] == str(sn)] if item_temp['beef_chicken_fish'].iloc[0] == 'No': return False return True def build_meal(meals_bank, meal_type, budget): # make histogram without penalty score of runnning the simulator 50 times and picking the winners. Run it again with the penalty score '''Builds a meal taking a DataFrame, meal type and budget as parameters. Meal takes item from each category (Carbs, Protein etc.) and returns the meal, weighted average score and total meal calories''' budget_weights = {**budget_weights_meals, **budget_weights_snacks_fruits_fat, **budget_weights_savoury_snacks, **budget_weights_sweets} bud = {} meal_similarity_list = [] df_health = df_nutrition.iloc[1:] max_meal_items = inputs.get('max_items_snack') if meal_type == 'snack' else inputs.get('max_items_meal') nutrition_density_list = [] energy_density_list = [] meal_score = 0 score_list = [] uti_score = [] ind_score = [] score = 0 meals = [] meal_cals = 0 types = [] total_budget = budget.copy() item_types = {'breakfast': ['Carbs', 'Protein', 'Vegetables'], 'lunch': ['Carbs', 'Protein', 'Vegetables'], 'dinner': ['Carbs', 'Protein', 'Vegetables'], 'snack': ['Fat']} if (snacks.get('sweets') == 'Yes') & (len(meals_bank.loc[meals_bank['food_category'] == 'Sweets']) > 0): item_types['snack'].append('Sweets') if (snacks.get('Savoury_Snacks') == 'Yes') & ( len(meals_bank.loc[meals_bank['food_category'] == 'Savoury_Snacks']) > 0): item_types['snack'].append('Savoury_Snacks') if (user_params.get('fruits') == 'No') & (len(meals_bank.loc[meals_bank['food_category'] == 'Fruits']) > 0): item_types['snack'].append('Fruits') for k in range(max_meal_items): for item_type in item_types[meal_type]: success = False if (len(meals_bank.loc[meals_bank['food_category'] == item_type]) > 0): df = meals_bank.loc[meals_bank['food_category'] == item_type].sample() # get the item primary sn sn = int(df['primary_sn'].values[0]) df['primary_sn'] = sn candidate_units = candidate_units_amounts(df, int(df['primary_sn'].values[0]), item_type) candidate_grams = candidate_units[0] for can_grams in candidate_grams: sn = float(df['primary_sn'].values[0]) for candidate_amount in candidate_units[1]: for amount in reversed(candidate_amount): calories, weight, grams, x, y = get_item_property(int(sn), can_grams, amount) can_cals = getattr(calories, "tolist", lambda: candidate_calories)() coupling_numbers = df['coupling'].values[0] coupling_cals, coupling_boolean, coupling_food_primary_sn, weight_coupling, grams_coupling, x, y, food_name_coupling = get_coupling( meals_bank, coupling_numbers, budget[item_type] - can_cals, can_grams, amount, df, item_type) can_cals += coupling_cals if can_cals < budget[item_type]: success = True if success: if success: # check if item of meal type is exist in the meal already if check_item_type_if_exist_already(meals, df_noa): sn_int = int(df['primary_sn'].astype(str).str[:1]) sn1 = 
int(df['primary_sn'].values[0]) calories1, weight, grams, x, y = get_item_property(sn1, can_grams, amount) bud[item_type] = getattr(calories1, "tolist", lambda: candidate_calories)() units_priority = candidate_grams.index(can_grams) + 1 meal_score += 1 / units_priority df_sn1 = df_tzameret_food_group.loc[ df_tzameret_food_group['ספרה ראשונה בקוד'] == sn_int] df_fish = df_noa.loc[df_noa['primary_sn'] == str(sn1)] food_group = df_sn1['קבוצת המזון'] if sn_int == 2: if df_fish['fish_free'].iloc[0] == 'Yes': meal_similarity_list.append(2.1) else: meal_similarity_list.append(2.2) else: meal_similarity_list.append(sn_int) item_score = (bud[item_type]) / (budget[item_type]) df['score'] = item_score score_list.append(item_score) types.append(df['food_category']) nutrition_density_normalized = df_nutrition.loc[ df_nutrition['smlmitzrach'] == str( int(sn1)), 'Nutrition density normalized'] energy_density = df_health.loc[ df_health['smlmitzrach'] == str(int(sn1)), 'Energy density'] nutrition_density_normalized = nutrition_density_normalized.astype(float) energy_density = energy_density.astype(float) if coupling_boolean: name = df['food_name'].values[0] name += ' עם ' + food_name_coupling df['food_name'] = name dataframe = df[['food_name', 'primary_sn']] dataframe.insert(2, 'Weight', [grams]) dataframe.insert(3, 'Unit', [weight]) dataframe.insert(4, 'Amount', [amount]) meals.append(dataframe) nutrition_density_list.append(nutrition_density_normalized.values.tolist()) energy_density_list.append(energy_density.values.tolist()) meal_cals = meal_cals + calories1 budget = update_calorie_budgets(can_cals, item_type, budget) else: continue break if success or budget[item_type] < units_thr[item_type] or len(meals) >= max_meal_items: break if success or budget[item_type] < type_thr[item_type] or len(meals) >= max_meal_items: break if budget['all'] < inputs['item_thr'] or len(meals) >= max_meal_items: break if len(meals) >= max_meal_items: break types_list_no_duplicates = np.unique([x.values[0] for x in types]).tolist() for each_type in reversed(types_list_no_duplicates): each_score = (float(total_budget.get(each_type)) - float(budget.get(each_type))) / float( total_budget.get(each_type)) ind_score.append(each_score) uti_score.append(budget_weights.get(each_type)) if (len(ind_score) < len(item_types[meal_type])): ind_score.append(0.000001) uti_score.append(.35) if (min(ind_score) < 0.7) and (meal_type != 'snack'): extra_penalty = inputs.get('extra_penalty') else: extra_penalty = 0 if (len(meals)) > 4: meal_penalty_length = (len(meals) - 4) * inputs.get('meal_penalty_length') else: meal_penalty_length = 0 total_utilization = sum(x * y for x, y in zip(ind_score, uti_score)) / sum(uti_score) if len(meal_similarity_list) != len(set(meal_similarity_list)): meal_similarity_penalty = inputs.get('meal_similarity_penalty') else: meal_similarity_penalty = 0 nutrition_density_list = [float(x) for [x] in nutrition_density_list] try: avg_nutrition = round(mean(nutrition_density_list), 4) except: avg_nutrition = nutrition_density_list energy_density_list = [float(x) for [x] in energy_density_list] avg_energy = round(mean(energy_density_list), 4) penalty_score = 1 - meal_score / len(meals) nutrition_boost = avg_nutrition * inputs.get('nutrition_bonus') energy_boost = avg_energy * inputs.get('energy_bonus') if scoring.get('legacy'): score = total_utilization - ( penalty_score * inputs.get('penalty_weight')) - extra_penalty - meal_penalty_length elif scoring.get('legacy_nut'): score = total_utilization - (penalty_score * 
            inputs.get('penalty_weight')) - extra_penalty - meal_penalty_length + nutrition_boost
    elif scoring.get('legacy_ene'):
        score = total_utilization - (penalty_score * inputs.get('penalty_weight')) \
                - extra_penalty - meal_penalty_length + energy_boost
    else:
        score = total_utilization - (penalty_score * inputs.get('penalty_weight')) \
                - extra_penalty - meal_penalty_length + energy_boost + nutrition_boost
    return meals, score, meal_cals, ind_score, meal_penalty_length, avg_nutrition, avg_energy, \
           meal_similarity_penalty, meal_similarity_list


def build_meal_wrapper():
    # Builds and populates a scoreboard that sorts the meals based on their score
    energy_density_listy = 0.0
    meal_similarity_listy = []
    nutrition_density_listy = []
    meal_similarity_penaltyy = []
    nutrition_density_listx = []
    energy_density_listx = 0.0
    meal_similarity_penaltyx = []
    meal_similarity_listx = []
    penalty_lengthy = []
    x = -3
    pd.set_option('precision', 2)
    max_iterations = inputs.get('max_iter')
    budget_weights = {**budget_weights_meals, **budget_weights_snacks_fruits_fat,
                      **budget_weights_savoury_snacks, **budget_weights_sweets}
    budget_weights_list = []
    for k, v in budget_weights.items():
        budget_weights_list.append(v)
    score_tracker = -2
    total_cals = 0
    meals = {}
    user_meals_num = inputs.get('meals_num')
    user_snacks_num = inputs.get('snacks_num')
    filler = []
    meal_types = ['breakfast', 'lunch', 'dinner']
    for k in range(inputs.get('snacks_num')):
        meal_types.append('snack')
    features = filter_meals_by_features(user_params, df_noa)
    for meal_type in meal_types:
        bank = filter_meals_by_meal_type(features, meal_type)
        x += 1
        scoreboard = {}
        for k in range(inputs.get('max_iter')):
            budgets_dynamic = update_budgets(inputs.get('total_cals'), inputs.get('meals_num'),
                                             inputs.get('snacks_num'), budget_weights_list)
            meal_budget = update_budgets(inputs.get('total_cals'), inputs.get('meals_num'),
                                         inputs.get('snacks_num'), budget_weights_list)
            if meal_type != 'snack':
                mealy, scorey, calsy, ut_scorey, penalty_lengthy, nutrition_density_listy, energy_density_listy, \
                    meal_similarity_penaltyy, meal_similarity_listy = build_meal(bank, meal_type, budgets_dynamic)
                if mealy and scorey and min(ut_scorey) > 0:
                    scoreboard[meal_type] = mealy, scorey, calsy
                    if scoreboard[meal_type][1] > score_tracker:
                        score_tracker = scoreboard[meal_type][1]
                        total_cals = scoreboard[meal_type][2]
            else:
                mealx, scorex, calsx, ut_scorex, penalty_lengthx, nutrition_density_listx, energy_density_listx, \
                    meal_similarity_penaltyx, meal_similarity_listx = build_meal(bank, meal_type, meal_budget)
                if mealx:
                    scoreboard[meal_type] = mealx, scorex, calsx, nutrition_density_listx, energy_density_listx, \
                        meal_similarity_penaltyx, meal_similarity_listx
            if scoreboard:
                meals[meal_type] = scoreboard[meal_type]
        for meal_name, whole_meal in scoreboard.items():
            df = pd.concat(whole_meal[0])
            df = pd.DataFrame(df.values.reshape(1, -1))
            df['score'] = float(scoreboard[meal_type][1])
            df['meal_cals'] = scoreboard[meal_type][2]
            if meal_name != 'snack':
                df['name'] = meal_name
                df['budget per meal'] = meal_budget.get('meal')
                df['meal budget utilization'] = (df['meal_cals'] / df['budget per meal'])
                df['average nutrition'] = nutrition_density_listy
                df['average energy'] = energy_density_listy
                df['meal_similarity_penalty'] = meal_similarity_penaltyy
                df['meal_similarity_list'] = pd.Series([meal_similarity_listy])
                df.set_index('name', drop=True, inplace=True)
            else:
                df['name'] = meal_name + " " + str(x)
                df['budget per snack'] = budgets_dynamic.get('snack')
                df['snack budget utilization'] = (df['meal_cals'] / df['budget per snack'])
                df['average nutrition'] = nutrition_density_listx
                df['average energy'] = energy_density_listx
                df['meal_similarity_penalty'] = meal_similarity_penaltyx
                df['meal_similarity_list'] = pd.Series([meal_similarity_listx])
                df.set_index('name', drop=True, inplace=True)
            if meal_name != 'snack':
                # per-category budgets: allocated, remaining ("leftover") and utilization
                df['meal penalty length'] = penalty_lengthy
                df['carb budget per meal'] = int(meal_budget.get('Carbs'))
                df['carbs budget remaining'] = budgets_dynamic.get('Carbs')
                df['carb budget utilization'] = (meal_budget.get('Carbs') - budgets_dynamic.get(
                    'Carbs')) / meal_budget.get('Carbs')
                df['protein budget per meal'] = meal_budget.get('Protein')
                df['protein budget remaining'] = budgets_dynamic.get('Protein')
                df['protein budget utilization'] = (meal_budget.get('Protein') - budgets_dynamic.get(
                    'Protein')) / meal_budget.get('Protein')
                df['vegetable budget per meal'] = meal_budget.get('Vegetables')
                df['vegetable budget remaining'] = budgets_dynamic.get('Vegetables')
                df['vegetable budget utilization'] = (meal_budget.get('Vegetables') - budgets_dynamic.get(
                    'Vegetables')) / meal_budget.get('Vegetables')
                df['fat budget per meal'] = meal_budget.get('Fat_meal')
                df['fat budget remaining'] = budgets_dynamic.get('Fat_meal')
                df['fat budget utilization'] = (meal_budget.get('Fat_meal') - budgets_dynamic.get(
                    'Fat_meal')) / meal_budget.get('Fat_meal')
            else:
                if snacks.get('sweets') == "Yes":
                    df['sweets budget per snack'] = meal_budget.get('Sweets')
                    df['sweets budget remaining'] = budgets_dynamic.get('Sweets')
                    df['sweets budget utilization'] = (meal_budget.get('Sweets') - budgets_dynamic.get(
                        'Sweets')) / meal_budget.get('Sweets')
                if snacks.get('Savoury_Snacks') == 'Yes':
                    df['savoury budget per snack'] = meal_budget.get('Savoury_Snacks')
                    df['savoury budget remaining'] = budgets_dynamic.get('Savoury_Snacks')
                    df['savoury budget utilization'] = (meal_budget.get('Savoury_Snacks') - budgets_dynamic.get(
                        'Savoury_Snacks')) / meal_budget.get('Savoury_Snacks')
                if user_params.get('fruits') == 'No':
                    df['fruits budget per snack'] = meal_budget.get('Fruits')
                    df['fruits budget remaining'] = budgets_dynamic.get('Fruits')
                    df['fruits budget utilization'] = (meal_budget.get('Fruits') - budgets_dynamic.get(
                        'Fruits')) / meal_budget.get('Fruits')
                df['fat budget per snack'] = meal_budget.get('Fat')
                df['fat budget remaining'] = budgets_dynamic.get('Fat')
                df['fat budget utilization'] = (meal_budget.get('Fat') - budgets_dynamic.get(
                    'Fat')) / meal_budget.get('Fat')
            filler.append(df)
        if meal_type == 'snack':
            user_snacks_num -= 1
        else:
            user_meals_num -= 1
        budgets_dynamic = update_budgets(float(inputs.get('total_cals') - total_cals), user_meals_num,
                                         user_snacks_num, budget_weights_list)
    df_meals = pd.concat(filler)
    df_final = df_meals.sort_values(by=['name', 'score'], ascending=[True, False])
    df_final.rename(columns={0: "Item 1", 1: "Primary SN 1", 2: "Weight", 3: "Unit1", 4: "Amount1",
                             5: "Item 2", 6: "Primary SN 2", 7: "Weight", 8: "Unit2", 9: "Amount2",
                             10: "Item 3", 11: "Primary SN 3", 12: "Weight", 13: "Unit3", 14: "Amount3",
                             15: "Item 4", 16: "Primary SN 4", 17: "Weight", 18: "Unit4", 19: "Amount4"},
                    inplace=True)
    return df_final


def displayMeal(data, mealType, items_meal_number, snack_numbers, df_nutrition):
    menu = ""
    calories = 0
    # whole-day totals
    carbs = 0
    protein = 0
    fat = 0
    # whole-day menu
    if len(mealType) > 1:
        for meal in mealType:
            items, temp_calories, temp_carbs, temp_protein, temp_fat = getMeal(data, meal, items_meal_number,
                                                                               df_nutrition)
            calories += temp_calories
            menu = menu + items
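            # Worked example of the calorie budgets feeding this menu (a sketch
            # derived from update_budgets() with the default inputs defined in core_fun):
            #   daily_budget = 2000, meals_num = 3, snacks_num = 2, budget_var = 0.3
            #   div = 3 + 0.3 * 2 = 3.6
            #   meal budget  = round(2000 / 3.6, 1)       = 555.6 kcal
            #   snack budget = round(0.3 * 2000 / 3.6, 1) = 166.7 kcal
            #   per-meal categories: Carbs = 0.4 * 555.6 = 222.2,
            #   Protein = 0.5 * 555.6 = 277.8, Vegetables = 0.2 * 555.6 = 111.1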
            carbs = carbs + temp_carbs
            protein = protein + temp_protein
            fat = fat + temp_fat
    # one meal for the user
    else:
        menu, calories, carbs, protein, fat = getMeal(data, mealType[0], items_meal_number, df_nutrition)
        return menu, carbs, protein, fat
    snacks, calories_snack, carbs_temp, temp_protein, temp_fat = getSnack(data, snack_numbers, df_nutrition)
    carbs = carbs + carbs_temp
    protein = protein + temp_protein
    fat = fat + temp_fat
    menu = menu + snacks
    calories += calories_snack
    menu = menu + "\n*סך הכל קלוריות -> " + str(round(calories, 2)) + '*'
    return menu, carbs, protein, fat


def getMeal(data, meal_type, meal_items_number, df_nutrition):
    # item[0] -> food name
    # item[1] -> serial number
    # item[2] -> unit
    # item[3] -> amount
    # item[4] -> weight
    dic = {'breakfast': 'ארוחת בוקר', 'lunch': 'ארוחת צהריים', 'dinner': 'ארוחת ערב'}
    global temp_meal
    temp_meal = data[data.index == meal_type]
    index_number = 1
    for index, row in temp_meal.iterrows():
        if isinstance(row['Item 3'], str) and isinstance(row['Item 2'], str) and isinstance(row['Item 1'], str):
            temp_meal = temp_meal.head(index_number).tail(1)
            break
        index_number += 1
    if len(temp_meal.index) > 1:
        temp_meal = temp_meal.head(1)
    items, items_number = get_items(temp_meal, meal_items_number)
    protein, fat, carbs, calories = get_calories(df_nutrition, items)
    if items_number == 4:
        return ("*" + dic[meal_type] + "*:\n1. " + buildItem(items['item1']) +
                "\n2. " + buildItem(items["item2"]) +
                "\n3. " + buildItem(items['item3']) +
                "\n4. " + buildItem(items['item4']) +
                "\nכמות קלוריות ->" + str(calories) + "\n\n"), calories, carbs, protein, fat
    return ("*" + dic[meal_type] + "*:\n1. " + buildItem(items['item1']) +
            "\n2. " + buildItem(items["item2"]) +
            "\n3. " + buildItem(items['item3']) +
            "\nכמות קלוריות ->" + str(calories) + "\n\n"), calories, carbs, protein, fat


def get_items(temp_meal, items_number):
    meal = {}
    items_number_temp = None
    for index in range(1, items_number + 1):
        item = temp_meal['Item ' + str(index)].iloc[0]
        if isinstance(item, str):
            items_number_temp = index
            meal['item' + str(index)] = [temp_meal['Item ' + str(index)].iloc[0],
                                         temp_meal['Primary SN ' + str(index)].iloc[0],
                                         temp_meal['Unit' + str(index)].iloc[0],
                                         int(temp_meal['Amount' + str(index)].iloc[0]),
                                         float(temp_meal['Weight'].values[0][index - 1])]
        else:
            break
    return meal, items_number_temp


def getSnack(snackData, snack_number, df_nutrition):
    # get the line of each snack
    snack1 = snackData[snackData.index == "snack 1"]
    snack2 = snackData[snackData.index == "snack 2"]
    # get the items
    snack1_, x = get_items(snack1, snack_number)
    snack2_, y = get_items(snack2, snack_number)
    protein1, fat1, carb1, snack1_calories = get_calories(df_nutrition, snack1_)
    protein2, fat2, carb2, snack2_calories = get_calories(df_nutrition, snack2_)
    carb1 = int(carb1)
    protein1 = int(protein1)
    fat1 = int(fat1)
    snack1_calories = int(snack1_calories)
    carb2 = int(carb2)
    protein2 = int(protein2)
    fat2 = int(fat2)
    snack2_calories = int(snack2_calories)
    if snack_number == 2:
        return "*ארוחת ביניים 1*:\n1. " + buildItem(snack1_['item1']) + "\n2. " + buildItem(
            snack1_['item2']) + "\nכמות קלוריות -> " + str(snack1_calories) + "\n\n*ארוחת ביניים 2*:\n1. " + buildItem(
            snack2_['item1']) + "\n2. " + buildItem(
            snack2_['item2']) + "\nכמות קלוריות -> " + str(
            snack2_calories) + "\n\n", snack1_calories + snack2_calories, carb1 + carb2, protein1 + protein2, fat1 + fat2
    return "*ארוחת ביניים *:\n1. " + buildItem(snack1_['item1']) + "\n2. 
" + buildItem( snack2_['item1']) + "\nכמות קלוריות -> " + str( snack1_calories) + "\n", snack1_calories, carb1, protein1, fat1 def buildItem(item): if item[0] is not 'NaN' and item[2] is not 'Nan' and item[0] is not 'nan' and item[2] is not 'nan': item_temp = item[0] amount = str(item[4]) if ' ' in item[2]: unit_temp = item[2].split(' ')[0] else: unit_temp = item[2] if '.0' in amount: amount = amount[:amount.find('.')] return str(item[0]) + " " + str(item[3]) + " " + unitHebrew(item[2], item[ 3]) + ' (' + unit_temp + ' אחת -> ' + amount + ' גרם)' def unitHebrew(unit, amount): unit_dic = {"כף": 'כפות', "מנה": 'מנות', "יחידה קטנה": 'יחידות קטנות', "פרח": 'פרחים', "פרוסה בינונית": 'פרוסות בינונוית', "יחידה": 'יחידות', "כף גדושה": 'כפות גדושות', "פרוסה": 'פרוסות', "מנה קטנה": 'מנות קטנות', "יחידה בינונית": 'יחידות בינוניות', "כפית": 'כפיות', "כוס": 'כוסות', "כוס קצוץ": 'כוסות'} if unit not in unit_dic: return unit if amount > 1: unit_temp = unit_dic[unit].strip() if unit_temp.count(' ') == 1: return unit_temp unit_temp = unit_temp.replace(' ', '') unit_temp = unit_temp.replace(' ', '') unit_temp = unit_temp[:unit_temp.find('ת') + 1] + ' ' + unit_temp[unit_temp.find('ת') + 1:] # one word if unit_temp.count('ת') == 1: return unit_temp.strip() return unit_temp return unit def get_calories(df_nutrition, items): # calculating the cake diagram feature # 1 gram fat is 9 calories # 1 gram protein is 4 calories # 1 gram carb is 4 calories # item[0]-> food name # item[1]-> serail number # item[2]-> unit # item[3]-> amount # item[4]-> Weight CARBS_GRAMS_CALOIRES = 4 PROTEIN_GRAMS_CALOIRES = 4 FAT_GRAMS_CALOIRES = 9 carbs = 0 protein = 0 fat = 0 count = 1 for _, item in items.items(): item_serail_number = str(item[1]) nutritional_value = item[3] * (item[4] / 100) carbs_temp = df_nutrition[df_nutrition['smlmitzrach'] == item_serail_number]['carbs'].iloc[0] protein_temp = df_nutrition[df_nutrition['smlmitzrach'] == item_serail_number]['protein'].iloc[0] fat_temp = df_nutrition[df_nutrition['smlmitzrach'] == item_serail_number]['total_fat'].iloc[0] carbs += carbs_temp * nutritional_value protein += protein_temp * nutritional_value fat += fat_temp * nutritional_value count += 1 # calulate the Nutritional values of the meal carbs = int(carbs) protein = int(protein) fat = int(fat) calories = round(carbs * CARBS_GRAMS_CALOIRES + protein * PROTEIN_GRAMS_CALOIRES + fat * FAT_GRAMS_CALOIRES, 2) return protein, fat, carbs, calories def core_fun(meal_type, title=""): global snacks, user_params, units_thr, type_thr, budget_weights_meals, budget_weights_snacks_fruits_fat, budget_weights_savoury_snacks, budget_weights_sweets, inputs, display_user_parameter, debug global user_meals_num, total_cals, user_snacks_num, candidate_calories, scoring global df_noa, df_tzameret_food_group, df_weights, df_nutrition pd.set_option("display.precision", 2) warnings.filterwarnings("ignore") # Dictionary that is equivalent to user inputs and filters the df_noa Database based on the inputs user_params = {'eggs': 'No', # If eggs = Yes, filters out all the meals with eggs 'vegetables': 'No', # If vegetables = Yes, fiters out all meals with vegetables 'fruits': 'No', # If fruits = Yes, filters out all snacks and meals with fruits and snacks don't have fruits as a category 'dairy': 'No', # If dairy = Yes, filters out all the dairy items 'beef_chicken_fish': 'No', # If beef_chicken_fish = Yes, filters out all the meals with beef chicken or fish # For remaining if Yes, filters only the food its for (i.e if kosher = Yes, only shows kosher food) 
'kosher': 'Yes', 'halal': 'Yes', 'vegetarian': 'No', 'vegan': 'No', 'ketogenic': 'No', 'paleo': 'No', 'mediterranean': 'No', 'lactose_free': 'No', 'gluten_free': 'No', 'milk_free': 'No', 'wheat_free': 'No', 'egg_free': 'No', 'soy_free': 'No', 'tree_nut_free': 'No', 'peanut_free': 'No', 'fish_free': 'No', 'shellfish_free': 'No'} # Dictionary to see if want to add certain snack elements to the snacks on the scoreboard snacks = {'sweets': 'No', 'Savoury_Snacks': 'Yes'} # Threshold for the build meal to stop looking for another item (If there are only 20 Carb calories left the meal exits the Carb code and moves to Protein): units_thr = {'Carbs': 25, 'Protein': 10, 'Vegetables': 10, 'Fat': 25, 'Fruits': 25, 'Sweets': 25, 'Savoury_Snacks': 25} # Another threshold for build meal to stop looking for another item in the category if there is less budget than the threshold: type_thr = {'Carbs': 25, 'Protein': 10, 'Vegetables': 10, 'Fat': 25, 'Fruits': 25, 'Sweets': 25, 'Savoury_Snacks': 25} # For snacks its either fruits and fat or savoury or sweets budget_weights_meals = {'Carbs': 0.4, 'Protein': 0.5, 'Vegetables': 0.2} budget_weights_snacks_fruits_fat = {'Fruits': 0.7, 'Fat': 0.4} budget_weights_savoury_snacks = {'Savoury_Snacks': 1.1} budget_weights_sweets = {'Sweets': 1.1} scoring = {'legacy': False, # legacy scoring system composed of budget utilization 'legacy_nut': True, # legacy scoring system with a bonus based on average nutritional density 'legacy_ene': False, # legacy scroing system with a bonus based on higher energy density 'legacy_nut_ene': False # legacy scoring system with a bonus based on nutrtion density and energy density with higher density the better } # User inputs that control different variables: inputs = {'for_noa_gid': 2106834268, # Gid that controls which for noa tab is shown, to switch just paste another Gid 'budget_var': 0.3, # Budget variable to see the weighting for snacks and individual meals 'item_thr': 4, # Threshold used to decided when to break code if there is less than 5 total budget left 'max_items_meal': 4, # Max amount of items per meal 'max_items_snack': 2, # Max amount of items per snack 'penalty_weight': 1, # Penalty weight for the meal score if the meal doesnt take the first option at the intersection of mida max amount meal 'nutrition_bonus': 0.1, # Bonus multiplier for the average nutrition density 'energy_bonus': 0.2, # Bonus multiplier for the average energy density 'meal_similarity_penalty': 0.3, # Penalty for having mutliple of the same category of meal items in the same meal 'max_iter': 7, # Number of meals for each meal type in the scoreboard 'meals_num': 3, # Number of different meal types and meals - will always be 3 'snacks_num': 2, # number of snacks in the final scoreboard 'meat_egg_same_day_penalty': 0.2, # Peanlty if the top meal has eggs or meat and another meal the same day also has eggs and meat 'extra_penalty': 0.2, # Penalty if there is less than 0.7 of each category for the budget is used 'meal_penalty_length': 0.1, # Penalty given if a meal is longer than 4 items and this is the weighting 'total_cals': 2000 # total calories in the budget for the day } debug = {'debug_en': True} # Used for finding bugs in code. 
Set to True for code to run properly # Toggle to show the user values in a DataFrame display_user_parameter = {'display_user_parameter': False} df_noa, df_tzameret_food_group, df_weights, df_nutrition = import_sheets(False) df_main = build_meal_wrapper() items, carbs, protein, fat = displayMeal(df_main, meal_type, inputs['max_items_meal'], inputs['max_items_snack'], df_nutrition) data = {'חלבון': protein, 'פחמימות': carbs, 'שומן': fat} items_temp = items.split('\n') items2 = '' for line in items_temp: if 'nan' not in line and 'nan nan nan' not in line: if 'ארוחת' in line or 'סך' in line: line = '\n' + line items2 += line + '\n' url = iniliatize_Diagram(title, data) return items, url def check_intent_key(tracker, max_iter=10): list_intents = [] check_intent = ['nutrition_meal_question', 'nutrition_many_xy_in_food'] index = 1 for event in tracker.events: if event.get("event") == "user": intent = event.get("parse_data").get("intent").get("name") list_intents.append(intent) if index == max_iter: break for intent in reversed(list_intents): if intent in check_intent: return intent return '' # ------------------------------------------------------------------ class OtherOptions(Action): def name(self) -> Text: return "action_nutrition_other_options" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: intents_dict = {"nutrition_many_xy_in_food": "action_nutrition_many_xy_in_food", "nutrition_meal_question": "action_nutrition_meal_question"} next_action = None previous_intent = None intent = check_intent_key(tracker) if intent == 'nutrition_meal_question': previous_intent = intent next_action = intents_dict[previous_intent] if intent == 'nutrition_many_xy_in_food': previous_intent = intent next_action = intents_dict[previous_intent] return [FollowupAction(next_action), SlotSet("previous_intent", previous_intent)] # ------------------------------------------------------------------ class Actionnutritionalvalues(Action): def name(self) -> Text: return "action_nutritional_values_food" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() prediction = tracker.latest_message entity = prediction['entities'][0]['value'] # db_dict = load_db_googleSheet(0x13) db_dict = get_tables('0x13') dietary_fiber_dict = {'food_energy': ['אנרגיה', 'קלוריות'], 'total_fat': ['שומן', 'גרם'], 'carbohydrates': ['פחמימות', 'גרם'], 'protein': ['חלבון ', 'גרם'], 'total_sugars': ['סוכר', 'גרם'], 'iron': ['ברזל', 'מ"ג'], 'calcium': ['סידן', 'מ"ג'], 'sodium': ['נתרן ', 'מ"ג'], 'total_dietary_fiber': ['סיבים תזונתיים', 'גרם'], 'vitamin_c': ['ויטמין סי', 'מ"ג'], 'vitamin_b12': ['ויטמין בי 12', 'מק"ג']} db_df = db_dict['tzameret'] entity_name = db_dict['common_food'][db_dict['common_food'].index == entity]['shmmitzrach'].iloc[0] res = '*ערכים תזונתיים של ' + entity_name + ' ל100 גרם ' + ':*\n' smlmitzrach_number = int( db_dict['common_food'][db_dict['common_food'].index == entity]['smlmitzrach'].iloc[0]) values = db_df[db_df['smlmitzrach'] == str(smlmitzrach_number)].iloc[0] for dietary, list in dietary_fiber_dict.items(): value = values[dietary] res += '- ' + list[0] + ': ' + str(round(value, 1)) + ' ' + list[1] + '\n' res = res_timer(res, tracker) except Exception as e: res = 'אין לי מושג כמה, מצטער!' 
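            # How the table bitmap used throughout these actions decomposes
            # (a sketch): get_tables('0x13') splits 0x13 into its set bits,
            #   0x13 = 0x10 | 0x2 | 0x1
            # and loads each table in its own process via activate_load_db():
            #   0x1  -> 'tzameret'     (full Tzameret food DB)
            #   0x2  -> 'lut'          (entity-alias lookup table)
            #   0x10 -> 'common_food'  (common food names -> shmmitzrach)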
res = res_error(res, tracker, e) dispatcher.utter_message(res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class Actionnutritionmanyxyinfood(Action): def name(self) -> Text: return "action_nutrition_many_xy_in_food" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: user_intent = tracker.latest_message.get('intent').get('name') db_dict = load_db_googleSheet(0x3) # db_dict = get_tables('0x3') lut = db_dict['lut'] tzameret = db_dict['tzameret'] user_msg = tracker.latest_message.get('text') count = 1 iteation_count = 1 index = 1 food_groups_numbers_tzameret_dict = {} start = 0 number_of_food = 35 food_list_babies = ['מטרנה', 'סימילק', 'תמ"ל', 'תמ"י', 'תמל', 'תמי', 'סימילאק', 'אבקה'] # get the indexes of the foods for serail in tzameret['smlmitzrach']: index += 1 if iteation_count == 1: start = index iteation_count += 1 else: serail_temp = int(serail) serail_temp += 1 serail_temp = str(serail_temp) if serail_temp[0] != str(count): end = index food_groups_numbers_tzameret_dict[count] = [start, end] count += 1 iteation_count = 1 if serail_temp[0] == 8 or serail_temp[0] == 9: break try: if user_intent != "nutrition_many_xy_in_food": x = tracker.get_slot('x') if tracker.get_slot('x') else None nut1_temp = x[0] nut2_temp = x[1] else: prediction = tracker.latest_message entity = prediction['entities'][0]['value'] # get the entity from the question if entity is None: if 'יש' in user_msg: entity = user_msg[user_msg.find('יש') + 2:] if 'הרבה' in user_msg: entity = user_msg[user_msg.find('הרבה') + 4:] for r in (("יש", ""), ("הרבה", ""), ("וגם", ""), (" ", " "), ("בהם", "")): entity = entity.replace(*r).strip() if ' ' in entity: entity = entity.replace(' ', ' ') if entity.count(' ') > 1: list = entity.split(' ') if 'ויטמין' == list[0]: nut1 = list[0] + ' ' + list[1] nut2 = list[2] if 'ויטמין' == list[1]: nut1 = list[1] + ' ' + list[2] nut2 = list[0] else: nut1, nut2 = entity.split(' ') if nut2[0] == 'ו': nut2 = nut2[1:] nut1_temp = nut1.strip() nut2_temp = nut2.strip() # get the entity in english nut1 = lut[lut.index == nut1_temp]['Entity'].values[0] nut2 = lut[lut.index == nut2_temp]['Entity'].values[0] tzameret = tzameret[['shmmitzrach', nut1, nut2]] df = [] for i in range(1, len(food_groups_numbers_tzameret_dict) + 1): for j in range(1, number_of_food): index_temp = random.randint(food_groups_numbers_tzameret_dict[i][0], food_groups_numbers_tzameret_dict[i][1]) db_food_temp = tzameret[tzameret.index == index_temp] item_name = db_food_temp['shmmitzrach'].iloc[0] if any(x in item_name for x in food_list_babies): continue else: nut1_value = db_food_temp[nut1].values[0] nut2_value = db_food_temp[nut2].values[0] if nut1_value != 0 and nut2_value != 0: db_food_temp.insert(3, 'sum', db_food_temp[nut1].values[0] + db_food_temp[nut2].values[0]) df.append(db_food_temp) db_food = pd.concat(df) db_food = db_food.drop_duplicates(subset='shmmitzrach') db_food_nut1 = db_food.sort_values(by=[nut1], ascending=False).head(5) db_food_nut2 = db_food.sort_values(by=[nut2], ascending=False).head(5) db_food_nut1_nut2 = db_food.sort_values(by=['sum'], ascending=False).head(5) res1 = ' במאכלים הבאים יש הרבה ' + nut1_temp + '\n' res2 = ' במאכלים הבאים יש הרבה ' + nut2_temp + '\n' res3 = ' במאכלים הבאים יש הרבה ' + nut1_temp + ' ו' + nut2_temp + '\n' res1 += getcolums_FromDataFrame(db_food_nut1, 'shmmitzrach') res2 += getcolums_FromDataFrame(db_food_nut2, 
'shmmitzrach') res3 += getcolums_FromDataFrame(db_food_nut1_nut2, 'shmmitzrach') res = res1 + '\n\n' + res2 + '\n\n' + res3 res = res_timer(res, tracker) except Exception as e: res = "אין לי מושג כמה, מצטער!" res = res_error(res, tracker, e) dispatcher.utter_message(res) return [SlotSet("x", [nut1_temp, nut2_temp]), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class Actionhowmanyxyinz(Action): def name(self) -> Text: return "action_nutrition_howmanyxyinz" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() user_msg = tracker.latest_message.get('text') two_nutrient = None z = None # db_dict = load_db_googleSheet(0x293) db_dict = get_tables('0x293') prediction = tracker.latest_message two_nutrient = prediction['entities'][0]['value'] x, y = two_nutrient.split(' ו') x = x.strip() y = y.strip() regex_res = re.search('כמה (.*) יש ב(.*)', user_msg.replace('?', '')) if regex_res: if two_nutrient is None: x, y = regex_res.group(1) x = x.strip() y = y.strip() z = regex_res.group(2) regex_res = re.search('כמה (.*) ב(.*)', user_msg.replace('?', '')) if regex_res: if two_nutrient is None: x, y = regex_res.group(1) x = x.strip() y = y.strip() z = regex_res.group(2) regex_res = re.search('מה הכמות של (.*) ב(.*)', user_msg.replace('?', '')) if regex_res: if two_nutrient is None: x, y = regex_res.group(1) x = x.strip() y = y.strip() z = regex_res.group(2) y = y[:len(y)] # get the units from the user message user_msg_temp = user_msg[user_msg.find(two_nutrient) + len(two_nutrient) + 1:len(user_msg)].replace('?', '') food1_units = "100 גרם" regex_units_res1 = re.search('ב(.*) של', user_msg_temp) regex_units_res2 = re.search(' (.*) של', user_msg_temp) if regex_units_res1: food1_units = regex_units_res1.group(1) elif regex_units_res2: food1_units = regex_units_res2.group(1) if food1_units in db_dict['food_units_aliases']['Unit Alias'].values: food1_units = db_dict['food_units_aliases'][db_dict['food_units_aliases']['Unit Alias'] == food1_units][ 'Zameret unit'].values[0] val1, res1 = how_many_x_in_y_core(x, z, food1_units, self.name(), tracker) val2, res2 = how_many_x_in_y_core(y, z, food1_units, self.name(), tracker) res1 = checkDoublePattern(res1, 'קלוריות') res2 = checkDoublePattern(res2, 'קלוריות') res1 = checkPrecentinres(res1, x) res2 = checkPrecentinres(res2, y) res = '' res += res1 res += "\n" res += res2 res = res_timer(res, tracker) except Exception as e: res = "אין לי מושג כמה, מצטער!" 
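            # Example of the parsing above (a sketch; the exact entity text depends
            # on the NLU model): for the message
            #   "כמה חלבון ושומן יש בבננה?"
            # two_nutrient = "חלבון ושומן" -> x = "חלבון", y = "שומן" (split on ' ו'),
            # and the regex 'כמה (.*) יש ב(.*)' yields z = "בננה"; each nutrient is
            # then resolved against z via how_many_x_in_y_core().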
res = res_error(res, tracker, e) dispatcher.utter_message(res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class Actioncompartiontwofoods(Action): def name(self) -> Text: return "action_nutrition_compare_foods" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() user_msg = tracker.latest_message.get('text') entities = tracker.latest_message.get('entities') x = None y1 = None y2 = None more_or_less = 'יותר' if 'יותר' in user_msg else 'פחות' # db_dict = load_db_googleSheet(0x293) db_dict = get_tables('0x293') for ent in entities: if ent['entity'] in db_dict['lut']["action_nutrition_compare_foods"].values: x = ent['value'] elif ent['entity'] in db_dict['lut']["action_nutrition_compare_foods"].values: y1, y2 = ent['value'].split('או') y1 = y1.strip() y2 = y2.strip() y1, y2 = user_msg[user_msg.find(x) + len(x):len(user_msg)].replace('?', '').split(' או ') y1 = y1.strip() y2 = y2.strip() if '-' in user_msg: y1 = y1[2:] else: y1 = y1[1:] if 'בב' in y1 or (y1[0] == 'ב' and y1[1] != 'ב' and 'בשר' not in y1): y1 = y1[1:len(y1)] if 'בב' in y2 or (y2[0] == 'ב' and y2[1] != 'ב' and 'בשר' not in y2): y2 = y2[1:len(y2)] if not y1 or not y2: regex_res = re.search('במה יש (פחות|יותר) .* ב(.*)', user_msg.replace('?', '')) if regex_res: more_or_less = regex_res.group(1) y1, y2 = regex_res.group(2).split('או') y1 = y1.strip() y2 = y2.strip() food1_units = "100 גרם" food2_units = "100 גרם" for k, y in enumerate([y1, y2]): regex_units_res = re.search('(.*) של (.*)', y) if regex_units_res: if k == 0: food1_units = regex_units_res.group(1) y1 = regex_units_res.group(2) else: food2_units = regex_units_res.group(1) y2 = regex_units_res.group(2) if food1_units in db_dict['food_units_aliases']['Unit Alias'].values: food1_units = \ db_dict['food_units_aliases'][db_dict['food_units_aliases']['Unit Alias'] == food1_units][ 'Zameret unit'].values[0] if food2_units in db_dict['food_units_aliases']['Unit Alias'].values: food2_units = \ db_dict['food_units_aliases'][db_dict['food_units_aliases']['Unit Alias'] == food2_units][ 'Zameret unit'].values[0] val1, res1 = how_many_x_in_y_core(x, y1, food1_units, self.name(), tracker) val2, res2 = how_many_x_in_y_core(x, y2, food1_units, self.name(), tracker) res1 = checkDoublePattern(res1, 'קלוריות') res2 = checkDoublePattern(res2, 'קלוריות') ys = (y1, y2) vals = (val1, val2) res = 'ב%s יש %s %s' % (ys[np.argmax(vals) if more_or_less == 'יותר' else np.argmin(vals)], more_or_less, x) if 'ב ב' in res and 'בבשר' not in res: res = res[1:] res += "\n" res += res1 res += "\n" res += res2 res = res_timer(res, tracker) except Exception as e: res = "אין לי מושג כמה, מצטער!" 
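            # Example of the comparison flow above (a sketch): for
            #   "במה יש יותר ברזל בעדשים או בתפוח?"
            # x = "ברזל", more_or_less = "יותר", y1 = "עדשים", y2 = "תפוח";
            # how_many_x_in_y_core() runs once per food (default unit "100 גרם"),
            # and np.argmax/np.argmin over (val1, val2) picks which food to name
            # in the answer.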
res = res_error(res, tracker, e) dispatcher.utter_message(res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class Actionwhataboutx(Action): def name(self) -> Text: return "action_nutrition_and_what_about_x" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: # get the right actions according to the intent intens_dict = {"nutrition_howmanyxiny": "action_nutrition_howmanyxiny", "nutrition_meal_question": "action_nutrition_meal_question", "nutrition_is_food_healthy": "action_nutrition_is_food_healthy", "nutrition_get_rda": "action_nutrition_get_rda", "nutrition_get_upper_limit": "action_nutrition_get_rda"} user_messge = tracker.latest_message.get('text') previous_intent = tracker.get_slot('previous_intent') entity_value = None slot = "x" next_action = intens_dict[previous_intent] # meal question if previous_intent == "nutrition_meal_question": return [FollowupAction(next_action), SlotSet(slot, user_messge), SlotSet('previous_intent', 'nutrition_and_what_about_x')] # ------------------------------------------------ entity = None entity_value = None # db_dict = load_db_googleSheet(0x2) db_dict = get_tables('0x2') lut_df = db_dict['lut'] nutrients = lut_df['Entity'].head(79) # check if rasa detect the entity if len(tracker.latest_message.get('entities')) != 0: prediction = tracker.latest_message entity_value = prediction['entities'][0]['value'] entity = prediction['entities'][0]['entity'] if entity_value is None: if 'ברזל' in user_messge: entity_value = 'ברזל' entity = 'nutrient' elif user_messge[0] == 'ו' and user_messge[1] != 'ב': entity_value = user_messge[1:] else: entity_value = user_messge[2:] if entity_value is None or entity_value == "": entity_value = user_messge # how many x in y if previous_intent == "nutrition_howmanyxiny": # rasa succeed to detect the entity if entity is not None: if entity == 'nutrient': slot = "x" else: slot = "y" # the entity value is taken from the user message else: if entity_value in nutrients: slot = "x" else: slot = "y" return [FollowupAction(next_action), SlotSet(slot, entity_value), SlotSet('previous_intent', 'nutrition_and_what_about_x')] return [FollowupAction(next_action), SlotSet(slot, entity_value)] except Exception as e: res = "אין לי מושג, מצטער!" 
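            # Fallback reply: "I have no idea, sorry!" (the generic variant).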
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionWhatXCanBeInY(Action): def name(self) -> Text: return "action_nutrition_what_xcanbeiny" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() negative_words_list = ['ללא', 'לא', 'בלי', 'וללא'] entities_temp = {} meal = "" # db_dict = load_db_googleSheet(0x402) db_dict = get_tables('0x402') lut = db_dict['lut'] df_noa = db_dict['food_units_features'] message = tracker.latest_message.get('text') # get the meal type if 'בוקר' in message: meal = "IL_Breakfast" if 'צהריים' in message or 'צהרים' in message: meal = "IL_Lunch" if 'ערב' in message: meal = 'IL_Dinner' # get the entity value from the bot prediction = tracker.latest_message entities_list = prediction['entities'] if len(entities_list) == 1: entity = entities_list[0]['value'] if ' ' not in entity: entities = [entity] filter_type = any(ele in message for ele in negative_words_list) for index, ent in enumerate(entities): entities_temp[index] = fliter_type(ent, lut, filter_type) else: entities_temp = get_entity_filters(entity, negative_words_list, lut) else: for index in range(len(entities_list)): entity = entities_list[index]['value'] entities_temp_2 = get_entity_filters(entity, negative_words_list, lut) entities_temp.update(entities_temp_2) # get the food accroding to the user selection items = df_noa.loc[df_noa[meal] == 'Yes'] for key, value in entities_temp.items(): items = items.loc[items[value[0]] == value[1]] # get the items by ranmdom 5 of them indeX = items.index.tolist() res = "" for i in range(1, 7): temp = random.randint(0, len(items) - 1) res += str(i) + ". " + str(items[items.index == indeX[temp]]['Food_Name'].values[0]) + "\n" res = res_timer(res, tracker) dispatcher.utter_message(text=res) except Exception as e: res = "אין לי מושג, מצטער!" 
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionMealQuestion(Action): def name(self) -> Text: return "action_nutrition_meal_question" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() meal = [] previous_intent = "" message = None words = ['בוקר', 'צהריים', 'ערב', 'יומי', 'יום', 'תפריט'] user_intent = tracker.latest_message.get('intent').get('name') if user_intent != "nutrition_meal_question": x = tracker.get_slot('x') if tracker.get_slot('x') else None if x is not None: if tracker.get_slot('previous_intent') == "nutrition_other_options" and any( ele in string for ele in words): message = x else: message = tracker.latest_message.get('text') if tracker.latest_message.get('text') else None title = '' if message is None: # get the message from the user in the meal action message = tracker.get_slot('x') if tracker.get_slot('x') else None if message is not None: if 'בוקר' in message: meal = ['breakfast'] title = 'ארוחת בוקר' elif 'צהריים' in message or 'צהרים' in message: meal = ['lunch'] title = 'ארוחת צהריים' elif 'ערב' in message: meal = ['dinner'] title = 'ארוחת ערב' elif 'יומי' in message or 'תפריט' in message or 'יום' in message or 'תפריט' in message: meal = ['breakfast', 'lunch', 'dinner'] title = 'תפריט יומי' res, url = core_fun(meal, title) res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res, image=url) except Exception as e: res = "אין לי מושג, מצטער!" res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", title), SlotSet("y", None), SlotSet("previous_intent", "nutrition_meal_question")] # ------------------------------------------------------------------ class ActionTimer(Action): def name(self) -> Text: return "action_timer" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: timer_state_str = "n/a" for ent in tracker.latest_message.get('entities'): if ent['entity'] == 'timer_state': timer_state_str = ent['value'] break if tracker.latest_message.get('intent').get('name') == 'timer_start': timer_state_str = 'on' dispatcher.utter_message(text="מצב הטיימר עודכן בהצלחה (%s)" % timer_state_str) return [SlotSet("timer_state", timer_state_str)] # ------------------------------------------------------------------ class ActionSimpleQuestion(Action): def name(self) -> Text: return "action_simple_question" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() # db_dict = load_db_googleSheet(0x6) db_dict = get_tables('0x6') lut_df = db_dict['lut'] user_intent = tracker.latest_message.get('intent').get('name') simple_entity = None for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[self.name()].values and ent['value'] in lut_df['Entity']: simple_entity = ent['value'] res = simpleQuestionAnswer(tracker, simple_entity, db_dict, user_intent) res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res) except Exception as e: res = "אין לי מושג, מצטער!" 
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionGetRDAQuestion(Action): def name(self) -> Text: return "action_nutrition_get_rda" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() user_intent = tracker.latest_message.get('intent').get('name') intent_upper = user_intent == 'nutrition_get_upper_limit' previous_intent = tracker.get_slot('previous_intent') if tracker.get_slot('previous_intent') else None if previous_intent == "nutrition_get_upper_limit" or previous_intent == "nutrition_get_rda": intent = previous_intent else: intent = user_intent rda_val, rda_units, rda_text, rda_status, nutrient, image = get_rda(self.name(), tracker, intent_upper) if rda_val > 0: intent_upper_str = "המקסימלית" if intent_upper else "המומלצת" res = "הקצובה היומית %s של %s %s היא\r %.2f %s" % \ (intent_upper_str, nutrient, get_personal_str(rda_status, tracker), rda_val, rda_units) res += "\r" res += rda_text if rda_text else "" else: if rda_text: res = rda_text else: res = "אין לי מושג, מצטער!" res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res, image=image) except Exception as e: res = "אין לי מושג, מצטער!" res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("previous_intent", intent), SlotSet("x", ""), SlotSet("y", "")] # ------------------------------------------------------------------ class ActionNutritionHowManyXinY(Action): def name(self) -> Text: return "action_nutrition_howmanyxiny" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() db_dict = get_tables('0x293') # db_dict = load_db_googleSheet(0x293) df_tzameret_food_group = pd.read_csv( "https://docs.google.com/spreadsheets/d/19rYDpki0jgGeNlKLPnINiDGye8QEfQ4IEEWSkLFo83Y/export?format=csv&gid=428717261") db_df = db_dict['tzameret'] lut_df = db_dict['lut'] common_df = db_dict['common_food'] units_df = db_dict['food_units'] units_aliases_df = db_dict['food_units_aliases'] y = None x = None user_msg = tracker.latest_message.get('text') user_intent = tracker.latest_message.get('intent').get('name') intent_upper = user_intent == 'nutrition_get_upper_limit' # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- # Fetch X and Y (from slots, from entities or from regex): if tracker.get_slot('previous_intent') == 'nutrition_and_what_about_x': x = tracker.get_slot('x') if tracker.get_slot('x') else None if tracker.latest_message.get('entities') or tracker.get_slot('y'): y = tracker.get_slot('y') if tracker.get_slot('y') else None name_xy = self.name() + "_x" for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[self.name() + "_x"].values: x = ent['value'] name_xy = self.name() + "_x" elif ent['entity'] in lut_df[self.name() + "_y"].values: y = ent['value'] name_xy = self.name() + "_y" regex_res = re.search('כמה (.*) יש ב(.*)', user_msg.replace('?', '')) if regex_res: x = regex_res.group(1) y = regex_res.group(2).strip() if not y: regex_res = re.search('.* ב(.*)', user_msg.replace('?', '')) if regex_res: y = regex_res.group(1).strip() if not y or not x: user_msg_temp = user_msg[user_msg.find('כמה') + 3:] regex_res = re.search('(.*) ב(.*)', user_msg_temp.replace('?', '')) if regex_res: x = regex_res.group(1).strip() y = regex_res.group(2).strip() 
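            # Next, the serving unit is parsed from y via the '(.*) של (.*)' pattern
            # ("<unit> of <food>"); it defaults to grams and is normalized through the
            # food_units_aliases lookup table.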
food_units = "גרם" regex_units_res = re.search('(.*) של (.*)', y) if y else None if regex_units_res: food_units = regex_units_res.group(1) y = regex_units_res.group(2) if food_units in units_aliases_df['Unit Alias'].values: food_units = units_aliases_df[units_aliases_df['Unit Alias'] == food_units]['Zameret unit'].values[ 0] # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- y_common = y if y in common_df.index: y_common = common_df[common_df.index == y]['shmmitzrach'][0] else: y_food = ' '.join(y.split(' ')[1:]) food_units = units_aliases_df[units_aliases_df['Unit Alias'] == y.split(' ')[0]]['Zameret unit'] if food_units.empty: food_units = y.split(' ')[0] else: food_units = food_units.values[0] if y_food in common_df.index: y_common = common_df[common_df.index == y_food]['shmmitzrach'][0] else: y_common = y_food food = db_df[db_df['shmmitzrach'].str.contains(y_common)].iloc[0, :] feature = lut_df[lut_df.index == x]["Entity"][0] units = lut_df[lut_df.index == x]["Units"][0] first_digit_in_smlmitzrach = int(food['smlmitzrach'][0]) mida = \ df_tzameret_food_group[df_tzameret_food_group['ספרה ראשונה בקוד'] == first_digit_in_smlmitzrach].iloc[ 0][ 'mida_maxAmount_meal'] mida = re.sub('_[123456789]', '', mida) mida = mida.replace(' ', '') mida = mida.split(',') units_df = units_df[units_df['smlmitzrach'] == int(food['smlmitzrach'])] shmmida = '' mishkal = '' for i in mida: if int(i) in units_df['mida'].values: shmmida = units_df[units_df['mida'] == int(i)]['shmmida'].iloc[0] mishkal = units_df[units_df['mida'] == int(i)]['mishkal'].iloc[0] food_units_factor = int(mishkal) / 100 fat_calories = round(food['total_fat'] * food_units_factor, 2) protein_calories = round(food['protein'] * food_units_factor, 2) carbs_calories = round(food['carbohydrates'] * food_units_factor, 2) data = {'חלבון': protein_calories, 'פחמימה': carbs_calories, 'שומן': fat_calories} if x == 'קלוריות': val = fat_calories * 9 + protein_calories * 4 + carbs_calories * 4 else: val = food[feature] * food_units_factor val = round(val, 2) food_units = "גרם" mishkal = int(mishkal) res = 'ב' + shmmida + ' (' + str(int(mishkal)) + ' ' + food_units + ')' + ' של ' + y_common + ' יש ' mishkal = int(mishkal) mishkal = str(mishkal) mishkal = mishkal[::-1] title = 'ב' + shmmida + ' )' + mishkal + ' ' + food_units + '(' + ' של ' + y_common + ' יש ' title = title[:title.find(',')] res += str(val) + ' ' + units + ' ' + x rda_val, rda_units, rda_text, rda_status, nutrient, x_1 = get_rda(name_xy, tracker, intent_upper) if rda_val > 0 and units not in ['יחב"ל']: rda = 100 * float(val) / rda_val intent_upper_str = "המקסימלית" if intent_upper else "המומלצת" res += "\r" res += "שהם כ-%d אחוז מהקצובה היומית %s %s" % ( int(rda), intent_upper_str, get_personal_str(rda_status, tracker)) res += "\r" res += rda_text if rda_text else "" res = checkDoublePattern(res, 'קלוריות') res = res_timer(res, tracker) res = checkPrecentinres(res, x) if ' ' in y.strip(): title = 'ב' + y else: title = re.sub('[1234567890%)(]', '', title) title = title.replace('גרם', '') title = title.replace('גרמים', '') title = title.replace('יש', '') title = title[title.find('של') + 2:] title = re.sub(' ', '', title) title = title.strip() title = 'ב' + title title = title.strip() url = iniliatize_Diagram(title, data) dispatcher.utter_message(text="%s" % res, image=url) except Exception as e: res = "אין לי מושג, מצטער!" 
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", x), SlotSet("y", y), SlotSet("previous_intent", "nutrition_howmanyxiny")] # ------------------------------------------------------------------ class ActionIsFoodHealthyQuestion(Action): def name(self) -> Text: return "action_nutrition_is_food_healthy" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() # db_dict = load_db_googleSheet(0x33) db_dict = get_tables('0x33') db_df = db_dict['tzameret'] lut_df = db_dict['lut'] common_df = db_dict['common_food'] food_ranges_df = db_dict['food_ranges'] food = "" food_entity = "" x = tracker.get_slot('x') if tracker.get_slot('x') else None if x is not None and x is not "": food = x food_entity = x else: for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[self.name()].values: food_entity = ent['value'] food = food_entity break if food in common_df.index: food = common_df[common_df.index == food]['shmmitzrach'][0] food = db_df[db_df['shmmitzrach'].str.contains(food)].iloc[0, :] _, nutrition_density_res = get_food_nutrition_density(food, food_ranges_df) advantages = [] disadvantages = [] for idx, row in food_ranges_df.iterrows(): if row["tzameret_name"]: if row["good_or_bad"] == "good": value = float(food[row["tzameret_name"]]) if idx == "Protein": threshold = 250 else: threshold = float(row["Medium - threshold per 100gr"]) if value > threshold: advantages.append(row["hebrew_name"]) elif row["good_or_bad"] == "bad": value = float(food[row["tzameret_name"]]) if idx == "Protein": threshold = 250 else: threshold = float(row["High - threshold per 100gr"]) if value > threshold: disadvantages.append(row["hebrew_name"]) nutrition_density_normalized = float(food["Nutrition density normalized"]) if nutrition_density_res == "low": res = "ב%s יש צפיפות תזונתית (רכיבים תזונתיים טובים ביחס לקלוריות) נמוכה" % food_entity elif nutrition_density_res == "med": res = "ב%s יש צפיפות תזונתית (רכיבים תזונתיים טובים ביחס לקלוריות) בינונית" % food_entity elif nutrition_density_res == "high": res = "ב%s יש צפיפות תזונתית (רכיבים תזונתיים טובים ביחס לקלוריות) גבוהה" % food_entity if disadvantages: res += ". " res += "החסרונות של %s הם הרבה %s" % (food_entity, ", ".join(disadvantages)) if advantages: res += ". " res += "היתרונות של %s הם הרבה %s" % (food_entity, ", ".join(advantages)) res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res) except Exception as e: res = "אין לי מושג, מצטער!" 
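            # Density replies above: "ב%s יש צפיפות תזונתית ... נמוכה/בינונית/גבוהה" reads
            # "%s has low/medium/high nutritional density (good nutrients relative to
            # calories)"; the advantages/disadvantages lists collect the nutrients that
            # cross the per-100g thresholds from the food_ranges table.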
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("previous_intent", "nutrition_is_food_healthy"), SlotSet("x", ""), SlotSet("y", "")] # ------------------------------------------------------------------ class ActionWhatIsHealthierQuestion(Action): def name(self) -> Text: return "action_nutrition_what_is_healthier" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() # db_dict = load_db_googleSheet(0x33) db_dict = get_tables('0x33') db_df = db_dict['tzameret'] lut_df = db_dict['lut'] common_df = db_dict['common_food'] food_ranges_df = db_dict['food_ranges'] user_msg = tracker.latest_message.get('text') food_entity1 = None food_entity2 = None for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[self.name() + "_x"].values: food_entity1 = ent['value'] elif ent['entity'] in lut_df[self.name() + "_y"].values: food_entity2 = ent['value'] if not food_entity2: regex_res = re.search('.* או (.*)', user_msg.replace('?', '')) if regex_res: food_entity2 = regex_res.group(1).strip() nutrition_density_cmp = [] advantages_cmp = [] disadvantages_cmp = [] for food_entity in (food_entity1, food_entity2): food = food_entity if food in common_df.index: food = common_df[common_df.index == food]['shmmitzrach'][0] food = db_df[db_df['shmmitzrach'].str.contains(food)].iloc[0, :] nutrition_density, _ = get_food_nutrition_density(food, food_ranges_df) advantages = [] disadvantages = [] for idx, row in food_ranges_df.iterrows(): if row["tzameret_name"]: if row["good_or_bad"] == "good": value = float(food[row["tzameret_name"]]) if idx == "Protein": threshold = 250 else: threshold = float(row["Medium - threshold per 100gr"]) if value > threshold: advantages.append(row["hebrew_name"]) elif row["good_or_bad"] == "bad": value = float(food[row["tzameret_name"]]) if idx == "Protein": threshold = 250 else: threshold = float(row["High - threshold per 100gr"]) if value > threshold: disadvantages.append(row["hebrew_name"]) nutrition_density_cmp.append(float(food["Nutrition density normalized"])) if disadvantages: res_temp = '*החסרונות של ' + food_entity + '*\n' res_temp += "%s" % (", ".join(disadvantages)) disadvantages_cmp.append(res_temp + '\n\n') if advantages: res_temp = '*היתרונות של ' + food_entity + '*\n' res_temp += "%s" % (", ".join(advantages)) advantages_cmp.append(res_temp + '\n\n') if nutrition_density_cmp[0] > nutrition_density_cmp[1]: res_temp = "לפי צפיפות תזונתית %s עדיף על פני %s\r" % (food_entity1, food_entity2) elif nutrition_density_cmp[0] < nutrition_density_cmp[1]: res_temp = "לפי צפיפות תזונתית %s עדיף על פני %s\r" % (food_entity2, food_entity1) else: res_temp = "לפי צפיפות תזונתית %s ו-%s שקולים\r" % (food_entity1, food_entity2) if nutrition_density_cmp[0] < nutrition_density_cmp[1]: advantages_cmp.reverse() disadvantages_cmp.reverse() res = res_temp res += '\n\n\n' for advantage in advantages_cmp: if advantage: res += "%s\n\r" % advantage for disadvantage in disadvantages_cmp: if disadvantage: res += "%s\n\r" % disadvantage res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res) except Exception as e: res = "אין לי מושג, מצטער!" 
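            # Verdict strings above: "לפי צפיפות תזונתית X עדיף על פני Y" reads "by
            # nutritional density X is preferable to Y"; "שקולים" reads "equivalent".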
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionWhatIsRecommendedQuestion(Action): def name(self) -> Text: return "action_nutrition_is_food_recommended" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() # db_dict = load_db_googleSheet(0x3b) db_dict = get_tables('0x3b') db_df = db_dict['tzameret'] lut_df = db_dict['lut'] food_qna_df = db_dict['food_qna'] common_df = db_dict['common_food'] food_ranges_df = db_dict['food_ranges'] for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[self.name()].values: food_entity = ent['value'] break food = food_entity if food in common_df.index: food = common_df[common_df.index == food]['shmmitzrach'][0] food = db_df[db_df['shmmitzrach'].str.contains(food)].iloc[0, :] _, nutrition_density_res = get_food_nutrition_density(food, food_ranges_df) _, nutrition_energy_res = get_food_energy_density(food, food_ranges_df) description_density_row = food_qna_df[(food_qna_df.index == nutrition_density_res) & (food_qna_df.energy_density == nutrition_energy_res)] res = description_density_row['description_density'].values[0] res = res.replace('var#food', food_entity) res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res) except Exception as e: res = "אין לי מושג, מצטער!" res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionEatBeforeTrainingQuestion(Action): def name(self) -> Text: return "action_eat_before_training" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() # db_dict = load_db_googleSheet(0x10) db_dict = get_tables('0x10') custom_df = db_dict['common_food'] user_intent = tracker.latest_message.get('intent').get('name') training_type = tracker.get_slot("training_type") training_duration = tracker.get_slot("training_duration") if training_type == 'ריצת אינטרוולים': if training_duration: res = custom_df['Entity'][training_type + ' מעל ' + training_duration][0] else: res = custom_df['Entity'][training_type][0] res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res) except Exception as e: res = "אין לי מושג, מצטער!" 
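            # 'ריצת אינטרוולים' is "interval running"; when a duration is set, the lookup
            # key becomes "<type> מעל <duration>" ("<type> over <duration>").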
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionBloodtestGenericQuestion(Action): def name(self) -> Text: return "action_nutrition_bloodtest_generic" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() # db_dict = load_db_googleSheet(0x102) db_dict = get_tables('0x102') lut_df = db_dict['lut'] bloodtest_df = db_dict['bloodtest_vals'] for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[self.name()].values: bloodtest_entity = ent['value'] break feature = db_dict['lut']['Entity'][bloodtest_entity] gender_str = "Male" if tracker.get_slot('gender') == "זכר": gender_str = "Male" elif tracker.get_slot('gender') == "נקבה": gender_str = "Female" age = float(tracker.get_slot('age') if tracker.get_slot('age') else "40") bloodtest_row = bloodtest_df[(bloodtest_df['Element'] == feature) & \ ((bloodtest_df['Gender'] == "ANY") | ( bloodtest_df['Gender'] == gender_str)) & \ ((bloodtest_df['Age min'] == "ANY") | ( bloodtest_df['Age min'].replace('ANY', -1).astype(float) <= age)) & \ ((bloodtest_df['Age Max'] == "ANY") | ( bloodtest_df['Age Max'].replace('ANY', -1).astype(float) > age))] bloodtest_type = int(bloodtest_row['Graph type'].values[0]) bloodtest_min = bloodtest_row['Min'].values[0] bloodtest_thr1 = bloodtest_row['Threshold 1'].values[0] bloodtest_thr2 = bloodtest_row['Threshold 2'].values[0] bloodtest_max = bloodtest_row['Max'].values[0] if bloodtest_type == 1: res = 'ערך תקין עבור בדיקת %s בין %.2f ועד %.2f, ערך מעל %.2f נחשב חריג' % ( bloodtest_entity, bloodtest_min, bloodtest_thr1, bloodtest_thr2) elif bloodtest_type == 2: res = 'ערך תקין עבור בדיקת %s בין %.2f ועד %.2f, ערך מתחת %.2f נחשב חריג' % ( bloodtest_entity, bloodtest_thr2, bloodtest_max, bloodtest_thr1) elif bloodtest_type == 3: res = 'ערך תקין עבור בדיקת %s בין %.2f ועד %.2f' % ( bloodtest_entity, bloodtest_thr1, bloodtest_thr2) res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res) except Exception as e: res = "אין לי מושג, מצטער!" 
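            # Graph type semantics used above and in the next action: 1 = normal within
            # [Min, Threshold 1], values above Threshold 2 flagged as abnormal;
            # 2 = normal within [Threshold 2, Max], values below Threshold 1 flagged;
            # 3 = normal within [Threshold 1, Threshold 2].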
            res = res_error(res, tracker)

        dispatcher.utter_message(text=res)

        return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)]


# ------------------------------------------------------------------
class ActionBloodtestValueQuestion(Action):

    def name(self) -> Text:
        return "action_nutrition_bloodtest_value"

    def run(self, dispatcher: CollectingDispatcher,
            tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:

        try:
            tic()

            # db_dict = load_db_googleSheet(0x102)
            db_dict = get_tables('0x102')

            lut_df = db_dict['lut']
            bloodtest_df = db_dict['bloodtest_vals']

            user_msg = tracker.latest_message.get('text')

            # Initialize both before the entity loop: either may stay unset when the
            # NLU misses an entity, and the checks below need a defined value.
            val = None
            bloodtest_entity = None

            for ent in tracker.latest_message.get('entities'):
                if ent['entity'] in [x for x in lut_df[self.name()].values if x != 0]:
                    if ent['entity'] == 'integer':
                        val = ent['value']
                    else:
                        bloodtest_entity = ent['value']

            if not val:
                # Regex fallback: 'האם (.*) הוא ...' ("is (.*) a ...") captures the value.
                regex_res = re.search('האם (.*) הוא .*', user_msg.replace('?', ''))
                if regex_res:
                    val = regex_res.group(1).strip()

            if not val:
                raise Exception()

            feature = db_dict['lut']['Entity'][bloodtest_entity]

            gender_str = "Male"
            if tracker.get_slot('gender') == "זכר":
                gender_str = "Male"
            elif tracker.get_slot('gender') == "נקבה":
                gender_str = "Female"

            age = float(tracker.get_slot('age') if tracker.get_slot('age') else "40")

            bloodtest_row = bloodtest_df[(bloodtest_df['Element'] == feature) &
                                         ((bloodtest_df['Gender'] == "ANY") |
                                          (bloodtest_df['Gender'] == gender_str)) &
                                         ((bloodtest_df['Age min'] == "ANY") |
                                          (bloodtest_df['Age min'].replace('ANY', -1).astype(float) <= age)) &
                                         ((bloodtest_df['Age Max'] == "ANY") |
                                          (bloodtest_df['Age Max'].replace('ANY', -1).astype(float) > age))]

            bloodtest_type = int(bloodtest_row['Graph type'].values[0])
            bloodtest_min = bloodtest_row['Min'].values[0]
            bloodtest_thr1 = bloodtest_row['Threshold 1'].values[0]
            bloodtest_thr2 = bloodtest_row['Threshold 2'].values[0]
            bloodtest_max = bloodtest_row['Max'].values[0]

            if bloodtest_type == 1:
                if bloodtest_min <= float(val) <= bloodtest_thr1:
                    res = 'כן, זהו ערך תקין עבור בדיקת %s היות והוא נופל בטווח בין %.2f ועד %.2f. ערך מעל %.2f נחשב לחריג' % (
                        bloodtest_entity, bloodtest_min, bloodtest_thr1, bloodtest_thr2)
                else:
                    res = 'לא, זהו אינו ערך תקין עבור בדיקת %s. ערך תקין הינו בטווח בין %.2f ועד %.2f. ערך מעל %.2f נחשב לחריג' % (
                        bloodtest_entity, bloodtest_min, bloodtest_thr1, bloodtest_thr2)
            elif bloodtest_type == 2:
                if bloodtest_thr2 <= float(val) <= bloodtest_max:
                    res = 'כן, זהו ערך תקין עבור בדיקת %s היות והוא נופל בטווח בין %.2f ועד %.2f. ערך מתחת %.2f נחשב לחריג' % (
                        bloodtest_entity, bloodtest_thr2, bloodtest_max, bloodtest_thr1)
                else:
                    res = 'לא, זהו אינו ערך תקין עבור בדיקת %s. ערך תקין הינו בטווח בין %.2f ועד %.2f. ערך מתחת %.2f נחשב לחריג' % (
                        bloodtest_entity, bloodtest_thr2, bloodtest_max, bloodtest_thr1)
            elif bloodtest_type == 3:
                if bloodtest_thr1 <= float(val) <= bloodtest_thr2:
                    res = 'כן, זהו ערך תקין עבור בדיקת %s היות והוא נופל בטווח בין %.2f ועד %.2f' % (
                        bloodtest_entity, bloodtest_thr1, bloodtest_thr2)
                else:
                    res = 'לא, זהו אינו ערך תקין עבור בדיקת %s. ערך תקין הינו בטווח בין %.2f ועד %.2f.' % (
                        bloodtest_entity, bloodtest_thr1, bloodtest_thr2)
            else:
                raise Exception()

            res = res_timer(res, tracker)

            dispatcher.utter_message(text="%s" % res)

        except Exception as e:
            res = "אין לי מושג, מצטער!"
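            # Yes/no replies above: "כן, זהו ערך תקין עבור בדיקת %s ..." reads "yes, this
            # is a normal value for the %s test since it falls in the range ..."; the 'לא'
            # variant states the normal range and the abnormal threshold instead.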
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionFoodSubstituteQuestion(Action): def name(self) -> Text: return "action_nutrition_food_substitute" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: tic() # db_dict = load_db_googleSheet(0xc33) db_dict = get_tables('0xc33') db_df = db_dict['tzameret'] lut_df = db_dict['lut'] features_df = db_dict['food_units_features'] common_df = db_dict['common_food'] food_ranges_df = db_dict['food_ranges'] subs_tags_alias_df = db_dict['subs_tags_alias'] features_df = features_df.drop(index=0) user_msg = tracker.latest_message.get('text') food_entity = "" for ent in tracker.latest_message.get('entities'): if ent['entity'] in lut_df[self.name()].values: food_entity = ent['value'] break if food_entity == "" or food_entity is None: prediction = tracker.latest_message food_entity = prediction['entities'][0]['value'] tzameret_groups_lut = {} tzameret_groups_lut['1'] = ['1', '4'] # Milk tzameret_groups_lut['2'] = ['1', '2', '3', '4'] # Meat tzameret_groups_lut['3'] = ['1', '2', '3', '4'] # Eggs tzameret_groups_lut['4'] = ['1', '4'] # Dairy tzameret_groups_lut['5'] = ['5', '6', '7', '9'] # Snacks tzameret_groups_lut['6'] = ['5', '6', '7', '9'] # Fruits tzameret_groups_lut['7'] = ['5', '6', '7', '9'] # Vegetables tzameret_groups_lut['8'] = ['8', '4'] # Fat tzameret_groups_lut['9'] = ['5', '6', '7', '9'] # Beverages food_energy_thr = 0.05 def get_advantages(food): advantages = [] for idx, row in food_ranges_df.iterrows(): if row["tzameret_name"] and row["tzameret_name"] in food: if row["good_or_bad"] == "good": value = float(food[row["tzameret_name"]]) if idx == "Protein": threshold = 250 else: threshold = float(row["Medium - threshold per 100gr"]) if value > threshold: advantages.append(row["hebrew_name"]) return advantages def get_advantages_score(food): act = food['advantages'] ref = ast.literal_eval(food['advantages_ref']) intersection = [] if isinstance(act, list) and isinstance(ref, list): intersection = list(set(act) & set(ref)) return len(intersection) food = food_entity if food in common_df.index: food = common_df[common_df.index == food]['shmmitzrach'][0] food_tzameret = db_df[db_df['shmmitzrach'].str.contains(food)].iloc[0, :] tzameret_code = int(food_tzameret['smlmitzrach']) tzameret_code_msb = food_tzameret['smlmitzrach'][0] food_energy = food_tzameret['food_energy'] food_features = features_df[features_df['smlmitzrach'].fillna(0).astype(int) == tzameret_code] user_msg_feature_v = [] user_msg_feature_k = list( set(subs_tags_alias_df.index.to_list()) & set(user_msg.replace(',', '').split(" "))) for tag in user_msg_feature_k: tag_df = subs_tags_alias_df[subs_tags_alias_df.index == tag]['Entity'] if tag_df.any: user_msg_feature_v.append(tag_df.values[0]) food_filter_1 = db_df[db_df['smlmitzrach'].str[0].isin(tzameret_groups_lut[tzameret_code_msb])] food_filter_2 = db_df[abs(db_df['food_energy'] - food_energy) / food_energy < food_energy_thr] food_filter_1_2 = pd.merge(food_filter_1, food_filter_2, how='inner') food_filter_1_2['smlmitzrach'] = food_filter_1_2['smlmitzrach'].astype(float) features_df['smlmitzrach'] = features_df['smlmitzrach'].astype(float) food_filter = features_df[features_df['smlmitzrach'].isin(food_filter_1_2['smlmitzrach'].to_list())] food_filter = 
food_filter[~food_filter['Food_Name'].str.contains(food_entity)] for tag in user_msg_feature_v: food_filter = food_filter[food_filter[tag] == 'Yes'] food_filter = food_filter.reset_index(drop=True) if food_features.empty: food_filter['features_score'] = 0 else: food_features_compact = food_features.iloc[:, 5:-4] food_filter_compact = food_filter.iloc[:, 5:-4].reset_index(drop=True) food_features_compact_shaped = pd.DataFrame( np.repeat(food_features_compact.values, len(food_filter_compact), axis=0)) food_features_compact_shaped.reset_index(drop=True) food_features_compact_shaped.columns = food_features_compact.columns food_features_score_df = (food_filter_compact == food_features_compact_shaped).astype(int) food_filter['features_score'] = food_features_score_df.sum(axis=1) food_advantages = get_advantages(food_tzameret) food_filter['advantages'] = food_filter_1_2.apply(get_advantages, axis=1) food_filter['advantages_ref'] = str(food_advantages) food_filter['advantages_score'] = food_filter.apply(get_advantages_score, axis=1) food_filter = food_filter.sort_values(['features_score', 'advantages_score'], ascending=False) res = "להלן 5 התחליפים הקרובים ביותר עבור %s" % food_entity res += "\n" res += '\n'.join(list(food_filter['Food_Name'].values[:5])) res = res_timer(res, tracker) dispatcher.utter_message(text="%s" % res) except Exception as e: res = "אין לי מושג, מצטער!" res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionPersonalizationList(Action): def name(self) -> Text: return "action_personlization_list" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: pkl_db = './persons.pkl' if path.exists(pkl_db): df = pd.read_pickle(pkl_db) dispatcher.utter_message(text="%s" % df.to_string()) except Exception as e: res = "אין לי מושג, מצטער!" res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ActionPersonalizationRemove(Action): def name(self) -> Text: return "action_personlization_remove" def run(self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any]) -> List[Dict[Text, Any]]: try: pkl_db = './persons.pkl' if path.exists(pkl_db): df = pd.read_pickle(pkl_db) phone_slot = tracker.get_slot("phone") if phone_slot in df.index: df = df.drop(tracker.get_slot("phone")) df.to_pickle(pkl_db) dispatcher.utter_message(text="רישומך הוסר מן המערכת") else: dispatcher.utter_message(text="אינך מופיע במערכת, לכן אין צורך בהסרת רישום") except Exception as e: res = "אין לי מושג, מצטער!" 
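            # Replies above: "רישומך הוסר מן המערכת" reads "your registration was removed
            # from the system"; "אינך מופיע במערכת" reads "you do not appear in the system,
            # so there is nothing to remove".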
res = res_error(res, tracker, e) dispatcher.utter_message(text=res) return [SlotSet("x", None), SlotSet("y", None), SlotSet("previous_intent", None)] # ------------------------------------------------------------------ class ProfileFormValidator(FormValidationAction): """ProfileForm Validator""" def name(self) -> Text: return "validate_profile_form" # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- async def required_slots( self, slots_mapped_in_domain: List[Text], dispatcher: "CollectingDispatcher", tracker: "Tracker", domain: "DomainDict", ) -> Optional[List[Text]]: required_slots = ["phone", "username", "gender", "age", "weight", "height"] return required_slots # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]: """A dictionary to map required slots to - an extracted entity - intent: value pairs - a whole message or a list of them, where a first match will be picked""" return { "phone": [ self.from_entity(entity="integer", role="phone"), self.from_entity(entity="integer"), self.from_text(), ], "username": [ self.from_entity(entity="name"), self.from_text(), ], "gender": [ self.from_entity(entity="gender"), ], "age": [ self.from_entity(entity="integer", role="age"), self.from_entity(entity="integer"), self.from_text(), ], "weight": [ self.from_entity(entity="integer", role="weight"), self.from_entity(entity="integer"), self.from_text(), ], "height": [ self.from_entity(entity="integer", role="height"), self.from_entity(entity="integer"), self.from_text(), ], } # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def validate_phone( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: """Validate phone value.""" requested_slot = tracker.get_slot("requested_slot") phone_slot = tracker.get_slot("phone") phone_value = None if requested_slot == "phone": phone_value = value.replace('-', '').replace(' ', '') pkl_db = './persons.pkl' if path.exists(pkl_db): df = pd.read_pickle(pkl_db) if phone_value in df.index: dispatcher.utter_message( text="פרטיך נטענו בהצלחה, ברוכים השבים %s" % df.loc[phone_value].username) return {'phone': phone_value, 'username': df.loc[phone_value].username, 'gender': df.loc[phone_value].gender, 'age': df.loc[phone_value].age, 'weight': df.loc[phone_value].weight, 'height': df.loc[phone_value].height} else: df = pd.DataFrame(columns=["username", "gender", "age", "weight", "height"]) df.to_pickle(pkl_db) elif phone_slot: phone_value = phone_slot return {"phone": phone_value} # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def validate_username( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: """Validate username value.""" requested_slot = tracker.get_slot("requested_slot") username_slot = tracker.get_slot("username") username_value = None if requested_slot == "username": username_value = value elif username_slot: username_value = username_slot return {"username": username_value} # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def validate_gender( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: """Validate gender value.""" requested_slot = tracker.get_slot("requested_slot") gender_slot = tracker.get_slot("gender") gender_value = None if requested_slot == "gender": gender_value = value elif gender_slot: gender_value = gender_slot return {"gender": gender_value} # -- -- 
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def validate_age( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: """Validate age value.""" requested_slot = tracker.get_slot("requested_slot") age_slot = tracker.get_slot("age") age_value = None if requested_slot == "age": age_value = value elif age_slot: age_value = age_slot return {"age": age_value} # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def validate_weight( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: """Validate weight value.""" requested_slot = tracker.get_slot("requested_slot") weight_slot = tracker.get_slot("weight") weight_value = None if requested_slot == "weight": weight_value = value elif weight_slot: weight_value = weight_slot return {"weight": weight_value} # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def validate_height( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: """Validate height value.""" requested_slot = tracker.get_slot("requested_slot") height_slot = tracker.get_slot("height") height_value = None if requested_slot == "height": height_value = value pkl_db = './persons.pkl' if path.exists(pkl_db): df = pd.read_pickle(pkl_db) phone_value = tracker.get_slot("phone") if phone_value not in df.index: df.loc[phone_value] = [tracker.get_slot("username"), tracker.get_slot("gender"), tracker.get_slot("age"), tracker.get_slot("weight"), height_value] df.to_pickle(pkl_db) dispatcher.utter_message(text="פרטיך נרשמו במערכת, לטובת כניסה מהירה יותר בפעם הבאה, תודה.") elif height_slot: height_value = height_slot return {"height": height_value} # -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- def submit( self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> List[Dict]: """ Define what the form has to do after all required slots are filled""" # utter submit template dispatcher.utter_message(text="מה נעשה היום?") return []
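

# ------------------------------------------------------------------
# Illustrative sketch (not part of the original action set): the serving-size
# arithmetic that ActionNutritionHowManyXinY relies on. Tzameret nutrient
# columns are stored per 100 g, so a serving of `mishkal` grams scales every
# value by mishkal / 100, and calories are rebuilt from the macros at
# 9 kcal/g for fat and 4 kcal/g for protein and carbohydrate. The numbers in
# the demo below are made up.
def scale_nutrient(per_100g_value, mishkal_grams):
    # Scale a per-100 g nutrient value to a serving of mishkal_grams grams.
    return round(per_100g_value * mishkal_grams / 100.0, 2)


def calories_from_macros(fat_g, protein_g, carbs_g):
    # Energy estimate used when the requested nutrient is 'קלוריות' (calories).
    return fat_g * 9 + protein_g * 4 + carbs_g * 4


if __name__ == '__main__':
    print(scale_nutrient(21.5, 30))              # 30 g of a 21.5 g/100 g food -> 6.45
    print(calories_from_macros(1.2, 6.45, 0.9))  # -> 40.2 kcal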
base_test.py
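# ElastAlert core tests; written for Python 2 (xrange, dict.iteritems, contextlib.nested) against a mocked Elasticsearch client.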
# -*- coding: utf-8 -*- import contextlib import copy import datetime import json import threading import elasticsearch import mock import pytest from elasticsearch.exceptions import ElasticsearchException from elastalert.enhancements import BaseEnhancement from elastalert.kibana import dashboard_temp from elastalert.util import dt_to_ts from elastalert.util import dt_to_unix from elastalert.util import dt_to_unixms from elastalert.util import EAException from elastalert.util import ts_to_dt from elastalert.util import unix_to_dt START_TIMESTAMP = '2014-09-26T12:34:45Z' END_TIMESTAMP = '2014-09-27T12:34:45Z' START = ts_to_dt(START_TIMESTAMP) END = ts_to_dt(END_TIMESTAMP) def _set_hits(ea_inst, hits): res = {'hits': {'hits': hits}} ea_inst.client_es.return_value = res def generate_hits(timestamps, **kwargs): hits = [] id_iter = xrange(len(timestamps)).__iter__() for ts in timestamps: data = {'_id': 'id' + str(id_iter.next()), '_source': {'@timestamp': ts}, '_type': 'logs'} for key, item in kwargs.iteritems(): data['_source'][key] = item hits.append(data) return {'hits': {'hits': hits}} def assert_alerts(ea_inst, calls): """ Takes a list of lists of timestamps. Asserts that an alert was called for each list, containing those timestamps. """ assert ea_inst.rules[0]['alert'][0].alert.call_count == len(calls) for call_num, call_args in enumerate(ea_inst.rules[0]['alert'][0].alert.call_args_list): assert not any([match['@timestamp'] not in calls[call_num] for match in call_args[0][0]]) assert len(call_args[0][0]) == len(calls[call_num]) def test_starttime(ea): invalid = ['2014-13-13', '2014-11-24T30:00:00', 'Not A Timestamp'] for ts in invalid: with pytest.raises((TypeError, ValueError)): ts_to_dt(ts) def test_init_rule(ea): # Simulate state of a rule just loaded from a file ea.rules[0]['minimum_starttime'] = datetime.datetime.now() new_rule = copy.copy(ea.rules[0]) map(new_rule.pop, ['agg_matches', 'current_aggregate_id', 'processed_hits', 'minimum_starttime']) # Properties are copied from ea.rules[0] ea.rules[0]['starttime'] = '2014-01-02T00:11:22' ea.rules[0]['processed_hits'] = ['abcdefg'] new_rule = ea.init_rule(new_rule, False) for prop in ['starttime', 'agg_matches', 'current_aggregate_id', 'processed_hits', 'minimum_starttime']: assert new_rule[prop] == ea.rules[0][prop] # Properties are fresh new_rule = ea.init_rule(new_rule, True) new_rule.pop('starttime') assert 'starttime' not in new_rule assert new_rule['processed_hits'] == {} def test_query(ea): ea.current_es.search.return_value = {'hits': {'hits': []}} ea.run_query(ea.rules[0], START, END) ea.current_es.search.assert_called_with(body={'filter': {'bool': {'must': [{'range': {'@timestamp': {'lte': END_TIMESTAMP, 'gt': START_TIMESTAMP}}}]}}, 'sort': [{'@timestamp': {'order': 'asc'}}]}, index='idx', _source_include=['@timestamp'], ignore_unavailable=True, size=100000) def test_query_with_fields(ea): ea.rules[0]['_source_enabled'] = False ea.current_es.search.return_value = {'hits': {'hits': []}} ea.run_query(ea.rules[0], START, END) ea.current_es.search.assert_called_with(body={'filter': {'bool': {'must': [{'range': {'@timestamp': {'lte': END_TIMESTAMP, 'gt': START_TIMESTAMP}}}]}}, 'sort': [{'@timestamp': {'order': 'asc'}}], 'fields': ['@timestamp']}, index='idx', ignore_unavailable=True, size=100000) def test_query_with_unix(ea): ea.rules[0]['timestamp_type'] = 'unix' ea.rules[0]['dt_to_ts'] = dt_to_unix ea.current_es.search.return_value = {'hits': {'hits': []}} ea.run_query(ea.rules[0], START, END) start_unix = dt_to_unix(START) 
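    # With timestamp_type 'unix', the range-filter bounds are epoch seconds
    # (dt_to_unix) rather than ISO-8601 strings.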
end_unix = dt_to_unix(END) ea.current_es.search.assert_called_with(body={'filter': {'bool': {'must': [{'range': {'@timestamp': {'lte': end_unix, 'gt': start_unix}}}]}}, 'sort': [{'@timestamp': {'order': 'asc'}}]}, index='idx', _source_include=['@timestamp'], ignore_unavailable=True, size=100000) def test_query_with_unixms(ea): ea.rules[0]['timestamp_type'] = 'unixms' ea.rules[0]['dt_to_ts'] = dt_to_unixms ea.current_es.search.return_value = {'hits': {'hits': []}} ea.run_query(ea.rules[0], START, END) start_unix = dt_to_unixms(START) end_unix = dt_to_unixms(END) ea.current_es.search.assert_called_with(body={'filter': {'bool': {'must': [{'range': {'@timestamp': {'lte': end_unix, 'gt': start_unix}}}]}}, 'sort': [{'@timestamp': {'order': 'asc'}}]}, index='idx', _source_include=['@timestamp'], ignore_unavailable=True, size=100000) def test_no_hits(ea): ea.current_es.search.return_value = {'hits': {'hits': []}} ea.run_query(ea.rules[0], START, END) assert ea.rules[0]['type'].add_data.call_count == 0 def test_no_terms_hits(ea): ea.rules[0]['use_terms_query'] = True ea.rules[0]['query_key'] = 'QWERTY' ea.rules[0]['doc_type'] = 'uiop' ea.current_es.search.return_value = {'hits': {'hits': []}} ea.run_query(ea.rules[0], START, END) assert ea.rules[0]['type'].add_terms_data.call_count == 0 def test_some_hits(ea): hits = generate_hits([START_TIMESTAMP, END_TIMESTAMP]) hits_dt = generate_hits([START, END]) ea.current_es.search.return_value = hits ea.run_query(ea.rules[0], START, END) assert ea.rules[0]['type'].add_data.call_count == 1 ea.rules[0]['type'].add_data.assert_called_with([x['_source'] for x in hits_dt['hits']['hits']]) def test_some_hits_unix(ea): ea.rules[0]['timestamp_type'] = 'unix' ea.rules[0]['dt_to_ts'] = dt_to_unix ea.rules[0]['ts_to_dt'] = unix_to_dt hits = generate_hits([dt_to_unix(START), dt_to_unix(END)]) hits_dt = generate_hits([START, END]) ea.current_es.search.return_value = copy.deepcopy(hits) ea.run_query(ea.rules[0], START, END) assert ea.rules[0]['type'].add_data.call_count == 1 ea.rules[0]['type'].add_data.assert_called_with([x['_source'] for x in hits_dt['hits']['hits']]) def _duplicate_hits_generator(timestamps, **kwargs): """Generator repeatedly returns identical hits dictionaries """ while True: yield generate_hits(timestamps, **kwargs) def test_duplicate_timestamps(ea): ea.current_es.search.side_effect = _duplicate_hits_generator([START_TIMESTAMP] * 3, blah='duplicate') ea.run_query(ea.rules[0], START, ts_to_dt('2014-01-01T00:00:00Z')) assert len(ea.rules[0]['type'].add_data.call_args_list[0][0][0]) == 3 assert ea.rules[0]['type'].add_data.call_count == 1 # Run the query again, duplicates will be removed and not added ea.run_query(ea.rules[0], ts_to_dt('2014-01-01T00:00:00Z'), END) assert ea.rules[0]['type'].add_data.call_count == 1 def test_match(ea): hits = generate_hits([START_TIMESTAMP, END_TIMESTAMP]) ea.current_es.search.return_value = hits ea.rules[0]['type'].matches = [{'@timestamp': END}] with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) ea.rules[0]['alert'][0].alert.called_with({'@timestamp': END_TIMESTAMP}) assert ea.rules[0]['alert'][0].alert.call_count == 1 def test_run_rule_calls_garbage_collect(ea): start_time = '2014-09-26T00:00:00Z' end_time = '2014-09-26T12:00:00Z' ea.buffer_time = datetime.timedelta(hours=1) ea.run_every = datetime.timedelta(hours=1) with contextlib.nested(mock.patch.object(ea.rules[0]['type'], 'garbage_collect'), mock.patch.object(ea, 'run_query')) as (mock_gc, mock_get_hits): 
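        # contextlib.nested() exists only on Python 2 (removed in Python 3, where a
        # single multi-context 'with' statement replaces it), consistent with the
        # xrange/iteritems usage elsewhere in this module.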
ea.run_rule(ea.rules[0], ts_to_dt(end_time), ts_to_dt(start_time)) # Running elastalert every hour for 12 hours, we should see self.garbage_collect called 12 times. assert mock_gc.call_count == 12 # The calls should be spaced 1 hour apart expected_calls = [ts_to_dt(start_time) + datetime.timedelta(hours=i) for i in range(1, 13)] for e in expected_calls: mock_gc.assert_any_call(e) def run_rule_query_exception(ea, mock_es): with mock.patch('elastalert.elastalert.Elasticsearch') as mock_es_init: mock_es_init.return_value = mock_es ea.run_rule(ea.rules[0], END, START) # Assert neither add_data nor garbage_collect were called # and that starttime did not change assert ea.rules[0].get('starttime') == START assert ea.rules[0]['type'].add_data.call_count == 0 assert ea.rules[0]['type'].garbage_collect.call_count == 0 assert ea.rules[0]['type'].add_count_data.call_count == 0 def test_query_exception(ea): mock_es = mock.Mock() mock_es.search.side_effect = ElasticsearchException run_rule_query_exception(ea, mock_es) def test_query_exception_count_query(ea): ea.rules[0]['use_count_query'] = True ea.rules[0]['doc_type'] = 'blahblahblahblah' mock_es = mock.Mock() mock_es.count.side_effect = ElasticsearchException run_rule_query_exception(ea, mock_es) def test_match_with_module(ea): mod = BaseEnhancement(ea.rules[0]) mod.process = mock.Mock() ea.rules[0]['match_enhancements'] = [mod] test_match(ea) mod.process.assert_called_with({'@timestamp': END}) def test_agg(ea): ea.max_aggregation = 1337 hits_timestamps = ['2014-09-26T12:34:45', '2014-09-26T12:40:45', '2014-09-26T12:47:45'] alerttime1 = dt_to_ts(ts_to_dt(hits_timestamps[0]) + datetime.timedelta(minutes=10)) hits = generate_hits(hits_timestamps) ea.current_es.search.return_value = hits with mock.patch('elastalert.elastalert.Elasticsearch'): # Aggregate first two, query over full range ea.rules[0]['aggregation'] = datetime.timedelta(minutes=10) ea.rules[0]['type'].matches = [{'@timestamp': h} for h in hits_timestamps] ea.run_rule(ea.rules[0], END, START) # Assert that the three matches were added to elasticsearch call1 = ea.writeback_es.create.call_args_list[0][1]['body'] call2 = ea.writeback_es.create.call_args_list[1][1]['body'] call3 = ea.writeback_es.create.call_args_list[2][1]['body'] assert call1['match_body'] == {'@timestamp': '2014-09-26T12:34:45'} assert not call1['alert_sent'] assert 'aggregate_id' not in call1 assert call1['alert_time'] == alerttime1 assert call2['match_body'] == {'@timestamp': '2014-09-26T12:40:45'} assert not call2['alert_sent'] assert call2['aggregate_id'] == 'ABCD' assert call3['match_body'] == {'@timestamp': '2014-09-26T12:47:45'} assert not call3['alert_sent'] assert 'aggregate_id' not in call3 # First call - Find all pending alerts # Second call - Find matches with agg_id == 'ABCD' # Third call - Find matches with agg_id == 'CDEF' ea.writeback_es.search.side_effect = [{'hits': {'hits': [{'_id': 'ABCD', '_source': call1}, {'_id': 'BCDE', '_source': call2}, {'_id': 'CDEF', '_source': call3}]}}, {'hits': {'hits': [{'_id': 'BCDE', '_source': call2}]}}, {'hits': {'hits': []}}] with mock.patch('elastalert.elastalert.Elasticsearch') as mock_es: ea.send_pending_alerts() # Assert that current_es was refreshed from the aggregate rules assert mock_es.called_with(host='', port='') assert mock_es.call_count == 2 assert_alerts(ea, [hits_timestamps[:2], hits_timestamps[2:]]) call1 = ea.writeback_es.search.call_args_list[6][1]['body'] call2 = ea.writeback_es.search.call_args_list[7][1]['body'] call3 = 
ea.writeback_es.search.call_args_list[8][1]['body'] assert 'alert_time' in call1['filter']['range'] assert call2['query']['query_string']['query'] == 'aggregate_id:ABCD' assert call3['query']['query_string']['query'] == 'aggregate_id:CDEF' assert ea.writeback_es.search.call_args_list[7][1]['size'] == 1337 def test_agg_no_writeback_connectivity(ea): """ Tests that if writeback_es throws an exception, the matches will be added to 'agg_matches' and when run again, that they will be passed again to add_aggregated_alert """ hit1, hit2, hit3 = '2014-09-26T12:34:45', '2014-09-26T12:40:45', '2014-09-26T12:47:45' hits = generate_hits([hit1, hit2, hit3]) ea.current_es.search.return_value = hits ea.rules[0]['aggregation'] = datetime.timedelta(minutes=10) ea.rules[0]['type'].matches = [{'@timestamp': hit1}, {'@timestamp': hit2}, {'@timestamp': hit3}] ea.writeback_es.create.side_effect = elasticsearch.exceptions.ElasticsearchException('Nope') with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['agg_matches'] == [{'@timestamp': hit1}, {'@timestamp': hit2}, {'@timestamp': hit3}] ea.current_es.search.return_value = {'hits': {'hits': []}} ea.add_aggregated_alert = mock.Mock() with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) ea.add_aggregated_alert.assert_any_call({'@timestamp': hit1}, ea.rules[0]) ea.add_aggregated_alert.assert_any_call({'@timestamp': hit2}, ea.rules[0]) ea.add_aggregated_alert.assert_any_call({'@timestamp': hit3}, ea.rules[0]) def test_silence(ea): # Silence test rule for 4 hours ea.args.rule = 'test_rule.yaml' # Not a real name, just has to be set ea.args.silence = 'hours=4' ea.silence() # Don't alert even with a match match = [{'@timestamp': '2014-11-17T00:00:00'}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 0 # Mock ts_now() to +5 hours, alert on match match = [{'@timestamp': '2014-11-17T00:00:00'}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.ts_now') as mock_ts: with mock.patch('elastalert.elastalert.Elasticsearch'): # Converted twice to add tzinfo mock_ts.return_value = ts_to_dt(dt_to_ts(datetime.datetime.utcnow() + datetime.timedelta(hours=5))) ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 1 def test_compound_query_key(ea): ea.rules[0]['query_key'] = 'this,that,those' ea.rules[0]['compound_query_key'] = ['this', 'that', 'those'] hits = generate_hits([START_TIMESTAMP, END_TIMESTAMP], this='abc', that=u'☃', those=4) ea.current_es.search.return_value = hits ea.run_query(ea.rules[0], START, END) call_args = ea.rules[0]['type'].add_data.call_args_list[0] assert 'this,that,those' in call_args[0][0][0] assert call_args[0][0][0]['this,that,those'] == u'abc, ☃, 4' def test_silence_query_key(ea): # Silence test rule for 4 hours ea.args.rule = 'test_rule.yaml' # Not a real name, just has to be set ea.args.silence = 'hours=4' ea.silence() # Don't alert even with a match match = [{'@timestamp': '2014-11-17T00:00:00', 'username': 'qlo'}] ea.rules[0]['type'].matches = match ea.rules[0]['query_key'] = 'username' with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 0 # Mock ts_now() to +5 hours, alert on match match = [{'@timestamp': '2014-11-17T00:00:00', 'username': 'qlo'}] ea.rules[0]['type'].matches 
= match with mock.patch('elastalert.elastalert.ts_now') as mock_ts: with mock.patch('elastalert.elastalert.Elasticsearch'): # Converted twice to add tzinfo mock_ts.return_value = ts_to_dt(dt_to_ts(datetime.datetime.utcnow() + datetime.timedelta(hours=5))) ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 1 def test_realert(ea): hits = ['2014-09-26T12:35:%sZ' % (x) for x in range(60)] matches = [{'@timestamp': x} for x in hits] ea.current_es.search.return_value = hits with mock.patch('elastalert.elastalert.Elasticsearch'): ea.rules[0]['realert'] = datetime.timedelta(seconds=50) ea.rules[0]['type'].matches = matches ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 1 # Doesn't alert again matches = [{'@timestamp': x} for x in hits] with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) ea.rules[0]['type'].matches = matches assert ea.rules[0]['alert'][0].alert.call_count == 1 # mock ts_now() to past the realert time matches = [{'@timestamp': hits[0]}] with mock.patch('elastalert.elastalert.ts_now') as mock_ts: with mock.patch('elastalert.elastalert.Elasticsearch'): # mock_ts is converted twice to add tzinfo mock_ts.return_value = ts_to_dt(dt_to_ts(datetime.datetime.utcnow() + datetime.timedelta(minutes=10))) ea.rules[0]['type'].matches = matches ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 2 def test_realert_with_query_key(ea): ea.rules[0]['query_key'] = 'username' ea.rules[0]['realert'] = datetime.timedelta(minutes=10) # Alert and silence username: qlo match = [{'@timestamp': '2014-11-17T00:00:00', 'username': 'qlo'}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 1 # Dont alert again for same username match = [{'@timestamp': '2014-11-17T00:05:00', 'username': 'qlo'}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 1 # Do alert with a different value match = [{'@timestamp': '2014-11-17T00:05:00', 'username': ''}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 2 # Alert with query_key missing match = [{'@timestamp': '2014-11-17T00:05:00'}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 3 # Still alert with a different value match = [{'@timestamp': '2014-11-17T00:05:00', 'username': 'ghengis_khan'}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 4 def test_realert_with_nested_query_key(ea): ea.rules[0]['query_key'] = 'user.name' ea.rules[0]['realert'] = datetime.timedelta(minutes=10) # Alert and silence username: qlo match = [{'@timestamp': '2014-11-17T00:00:00', 'user': {'name': 'qlo'}}] ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 1 # Dont alert again for same username match = [{'@timestamp': '2014-11-17T00:05:00', 'user': {'name': 'qlo'}}] 
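    # A repeated match with the same nested user.name stays silenced, so the
    # alert count below remains 1.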
ea.rules[0]['type'].matches = match with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) assert ea.rules[0]['alert'][0].alert.call_count == 1 def test_count(ea): ea.rules[0]['use_count_query'] = True ea.rules[0]['doc_type'] = 'doctype' with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) # Assert that es.count is run against every run_every timeframe between START and END start = START query = {'query': {'filtered': {'filter': {'bool': {'must': [{'range': {'@timestamp': {'lte': END_TIMESTAMP, 'gt': START_TIMESTAMP}}}]}}}}} while END - start > ea.run_every: end = start + ea.run_every query['query']['filtered']['filter']['bool']['must'][0]['range']['@timestamp']['lte'] = dt_to_ts(end) query['query']['filtered']['filter']['bool']['must'][0]['range']['@timestamp']['gt'] = dt_to_ts(start) start = start + ea.run_every ea.current_es.count.assert_any_call(body=query, doc_type='doctype', index='idx', ignore_unavailable=True) def run_and_assert_segmented_queries(ea, start, end, segment_size): with mock.patch.object(ea, 'run_query') as mock_run_query: ea.run_rule(ea.rules[0], end, start) original_end, original_start = end, start for call_args in mock_run_query.call_args_list: end = min(start + segment_size, original_end) assert call_args[0][1:3] == (start, end) start += segment_size # Assert elastalert_status was created for the entire time range assert ea.writeback_es.create.call_args_list[-1][1]['body']['starttime'] == dt_to_ts(original_start) assert ea.writeback_es.create.call_args_list[-1][1]['body']['endtime'] == dt_to_ts(original_end) def test_query_segmenting(ea): # buffer_time segments with normal queries ea.rules[0]['buffer_time'] = datetime.timedelta(minutes=53) mock_es = mock.Mock() mock_es.search.side_effect = _duplicate_hits_generator([START_TIMESTAMP]) with mock.patch('elastalert.elastalert.Elasticsearch') as mock_es_init: mock_es_init.return_value = mock_es run_and_assert_segmented_queries(ea, START, END, ea.rules[0]['buffer_time']) # Assert that num_hits correctly includes the 1 hit per query assert ea.num_hits == ea.current_es.search.call_count # run_every segments with count queries ea.rules[0]['use_count_query'] = True with mock.patch('elastalert.elastalert.Elasticsearch'): run_and_assert_segmented_queries(ea, START, END, ea.run_every) # run_every segments with terms queries ea.rules[0].pop('use_count_query') ea.rules[0]['use_terms_query'] = True with mock.patch('elastalert.elastalert.Elasticsearch'): run_and_assert_segmented_queries(ea, START, END, ea.run_every) def test_get_starttime(ea): endtime = '2015-01-01T00:00:00Z' mock_es = mock.Mock() mock_es.search.return_value = {'hits': {'hits': [{'_source': {'endtime': endtime}}]}} ea.writeback_es = mock_es # 4 days old, will return endtime with mock.patch('elastalert.elastalert.ts_now') as mock_ts: mock_ts.return_value = ts_to_dt('2015-01-05T00:00:00Z') # 4 days ahead of the endtime assert ea.get_starttime(ea.rules[0]) == ts_to_dt(endtime) # 10 days old, will return None with mock.patch('elastalert.elastalert.ts_now') as mock_ts: mock_ts.return_value = ts_to_dt('2015-01-11T00:00:00Z') # 10 days ahead of the endtime assert ea.get_starttime(ea.rules[0]) is None def test_set_starttime(ea): # standard query, no starttime, no last run end = ts_to_dt('2014-10-10T10:10:10') with mock.patch.object(ea, 'get_starttime') as mock_gs: mock_gs.return_value = None ea.set_starttime(ea.rules[0], end) assert mock_gs.call_count == 1 assert ea.rules[0]['starttime'] == end - 
ea.buffer_time # Standard query, no starttime, rule specific buffer_time ea.rules[0].pop('starttime') ea.rules[0]['buffer_time'] = datetime.timedelta(minutes=37) with mock.patch.object(ea, 'get_starttime') as mock_gs: mock_gs.return_value = None ea.set_starttime(ea.rules[0], end) assert mock_gs.call_count == 1 assert ea.rules[0]['starttime'] == end - datetime.timedelta(minutes=37) ea.rules[0].pop('buffer_time') # Standard query, no starttime, last run ea.rules[0].pop('starttime') with mock.patch.object(ea, 'get_starttime') as mock_gs: mock_gs.return_value = ts_to_dt('2014-10-10T00:00:00') ea.set_starttime(ea.rules[0], end) assert mock_gs.call_count == 1 assert ea.rules[0]['starttime'] == ts_to_dt('2014-10-10T00:00:00') # Standard query, no starttime, last run, assure buffer_time doesn't go past ea.rules[0].pop('starttime') ea.rules[0]['buffer_time'] = datetime.timedelta(weeks=1000) with mock.patch.object(ea, 'get_starttime') as mock_gs: mock_gs.return_value = ts_to_dt('2014-10-09T00:00:00') # First call sets minumum_time ea.set_starttime(ea.rules[0], end) # Second call uses buffer_time, but it goes past minimum ea.set_starttime(ea.rules[0], end) assert ea.rules[0]['starttime'] == ts_to_dt('2014-10-09T00:00:00') # Standard query, starttime ea.rules[0].pop('buffer_time') ea.rules[0].pop('minimum_starttime') with mock.patch.object(ea, 'get_starttime') as mock_gs: mock_gs.return_value = None ea.set_starttime(ea.rules[0], end) assert mock_gs.call_count == 0 assert ea.rules[0]['starttime'] == end - ea.buffer_time # Count query, starttime, no previous endtime ea.rules[0]['use_count_query'] = True ea.rules[0]['doc_type'] = 'blah' with mock.patch.object(ea, 'get_starttime') as mock_gs: mock_gs.return_value = None ea.set_starttime(ea.rules[0], end) assert mock_gs.call_count == 0 assert ea.rules[0]['starttime'] == end - ea.run_every # Count query, with previous endtime with mock.patch('elastalert.elastalert.Elasticsearch'): ea.run_rule(ea.rules[0], END, START) ea.set_starttime(ea.rules[0], end) assert ea.rules[0]['starttime'] == END # buffer_time doesn't go past previous endtime ea.rules[0].pop('use_count_query') ea.rules[0]['previous_endtime'] = end - ea.buffer_time * 2 ea.set_starttime(ea.rules[0], end) assert ea.rules[0]['starttime'] == ea.rules[0]['previous_endtime'] def test_kibana_dashboard(ea): match = {'@timestamp': '2014-10-11T00:00:00'} mock_es = mock.Mock() ea.rules[0]['use_kibana_dashboard'] = 'my dashboard' with mock.patch('elastalert.elastalert.Elasticsearch') as mock_es_init: mock_es_init.return_value = mock_es # No dashboard found mock_es.search.return_value = {'hits': {'hits': []}} with pytest.raises(EAException): ea.use_kibana_link(ea.rules[0], match) mock_call = mock_es.search.call_args_list[0][1] assert mock_call['body'] == {'query': {'term': {'_id': 'my dashboard'}}} # Dashboard found mock_es.create.return_value = {'_id': 'ABCDEFG'} mock_es.search.return_value = {'hits': {'hits': [{'_source': {'dashboard': json.dumps(dashboard_temp)}}]}} url = ea.use_kibana_link(ea.rules[0], match) assert 'ABCDEFG' in url db = json.loads(mock_es.create.call_args_list[0][1]['body']['dashboard']) assert 'anytest' in db['title'] # Query key filtering added ea.rules[0]['query_key'] = 'foobar' match['foobar'] = 'baz' url = ea.use_kibana_link(ea.rules[0], match) db = json.loads(mock_es.create.call_args_list[-1][1]['body']['dashboard']) assert db['services']['filter']['list']['1']['field'] == 'foobar' assert db['services']['filter']['list']['1']['query'] == '"baz"' # Compound query key 
ea.rules[0]['query_key'] = 'foo,bar' ea.rules[0]['compound_query_key'] = ['foo', 'bar'] match['foo'] = 'cat' match['bar'] = 'dog' match['foo,bar'] = 'cat, dog' url = ea.use_kibana_link(ea.rules[0], match) db = json.loads(mock_es.create.call_args_list[-1][1]['body']['dashboard']) found_filters = 0 for filter_id, filter_dict in db['services']['filter']['list'].items(): if (filter_dict['field'] == 'foo' and filter_dict['query'] == '"cat"') or \ (filter_dict['field'] == 'bar' and filter_dict['query'] == '"dog"'): found_filters += 1 continue assert found_filters == 2 def test_rule_changes(ea): ea.rule_hashes = {'rules/rule1.yaml': 'ABC', 'rules/rule2.yaml': 'DEF'} ea.rules = [ea.init_rule(rule, True) for rule in [{'rule_file': 'rules/rule1.yaml', 'name': 'rule1', 'filter': []}, {'rule_file': 'rules/rule2.yaml', 'name': 'rule2', 'filter': []}]] ea.rules[1]['processed_hits'] = ['save me'] new_hashes = {'rules/rule1.yaml': 'ABC', 'rules/rule3.yaml': 'XXX', 'rules/rule2.yaml': '!@#$'} with mock.patch('elastalert.elastalert.get_rule_hashes') as mock_hashes: with mock.patch('elastalert.elastalert.load_configuration') as mock_load: mock_load.side_effect = [{'filter': [], 'name': 'rule2', 'rule_file': 'rules/rule2.yaml'}, {'filter': [], 'name': 'rule3', 'rule_file': 'rules/rule3.yaml'}] mock_hashes.return_value = new_hashes ea.load_rule_changes() # All 3 rules still exist assert ea.rules[0]['name'] == 'rule1' assert ea.rules[1]['name'] == 'rule2' assert ea.rules[1]['processed_hits'] == ['save me'] assert ea.rules[2]['name'] == 'rule3' # Assert 2 and 3 were reloaded assert mock_load.call_count == 2 mock_load.assert_any_call('rules/rule2.yaml', ea.conf) mock_load.assert_any_call('rules/rule3.yaml', ea.conf) # A new rule with a conflicting name wont load new_hashes = copy.copy(new_hashes) new_hashes.update({'rules/rule4.yaml': 'asdf'}) with mock.patch('elastalert.elastalert.get_rule_hashes') as mock_hashes: with mock.patch('elastalert.elastalert.load_configuration') as mock_load: with mock.patch.object(ea, 'send_notification_email') as mock_send: mock_load.return_value = {'filter': [], 'name': 'rule3', 'new': 'stuff', 'rule_file': 'rules/rule4.yaml'} mock_hashes.return_value = new_hashes ea.load_rule_changes() mock_send.assert_called_once() assert len(ea.rules) == 3 assert not any(['new' in rule for rule in ea.rules]) # An old rule which didn't load gets reloaded new_hashes = copy.copy(new_hashes) new_hashes['rules/rule4.yaml'] = 'qwerty' with mock.patch('elastalert.elastalert.get_rule_hashes') as mock_hashes: with mock.patch('elastalert.elastalert.load_configuration') as mock_load: mock_load.return_value = {'filter': [], 'name': 'rule4', 'new': 'stuff', 'rule_file': 'rules/rule4.yaml'} mock_hashes.return_value = new_hashes ea.load_rule_changes() assert len(ea.rules) == 4 def test_strf_index(ea): """ Test that the get_index function properly generates indexes spanning days """ ea.rules[0]['index'] = 'logstash-%Y.%m.%d' ea.rules[0]['use_strftime_index'] = True # Test formatting with times start = ts_to_dt('2015-01-02T12:34:45Z') end = ts_to_dt('2015-01-02T16:15:14Z') assert ea.get_index(ea.rules[0], start, end) == 'logstash-2015.01.02' end = ts_to_dt('2015-01-03T01:02:03Z') assert ea.get_index(ea.rules[0], start, end) == 'logstash-2015.01.02,logstash-2015.01.03' # Test formatting for wildcard assert ea.get_index(ea.rules[0]) == 'logstash-*' ea.rules[0]['index'] = 'logstash-%Y.%m' assert ea.get_index(ea.rules[0]) == 'logstash-*' ea.rules[0]['index'] = 'logstash-%Y.%m-stuff' assert ea.get_index(ea.rules[0]) 
== 'logstash-*-stuff' def test_count_keys(ea): ea.rules[0]['timeframe'] = datetime.timedelta(minutes=60) ea.rules[0]['top_count_keys'] = ['this', 'that'] ea.rules[0]['type'].matches = {'@timestamp': END} ea.rules[0]['doc_type'] = 'blah' buckets = [{'aggregations': {'filtered': {'counts': {'buckets': [{'key': 'a', 'doc_count': 10}, {'key': 'b', 'doc_count': 5}]}}}}, {'aggregations': {'filtered': {'counts': {'buckets': [{'key': 'd', 'doc_count': 10}, {'key': 'c', 'doc_count': 12}]}}}}] ea.current_es.search.side_effect = buckets counts = ea.get_top_counts(ea.rules[0], START, END, ['this', 'that']) calls = ea.current_es.search.call_args_list assert calls[0][1]['search_type'] == 'count' assert calls[0][1]['body']['aggs']['filtered']['aggs']['counts']['terms'] == {'field': 'this', 'size': 5} assert counts['top_events_this'] == {'a': 10, 'b': 5} assert counts['top_events_that'] == {'d': 10, 'c': 12} def test_exponential_realert(ea): ea.rules[0]['exponential_realert'] = datetime.timedelta(days=1) # 1 day ~ 10 * 2**13 seconds ea.rules[0]['realert'] = datetime.timedelta(seconds=10) until = ts_to_dt('2015-03-24T00:00:00') ts5s = until + datetime.timedelta(seconds=5) ts15s = until + datetime.timedelta(seconds=15) ts1m = until + datetime.timedelta(minutes=1) ts5m = until + datetime.timedelta(minutes=5) ts4h = until + datetime.timedelta(hours=4) test_values = [(ts5s, until, 0), # Exp will increase to 1, 10*2**0 = 10s (ts15s, until, 0), # Exp will stay at 0, 10*2**0 = 10s (ts15s, until, 1), # Exp will increase to 2, 10*2**1 = 20s (ts1m, until, 2), # Exp will decrease to 1, 10*2**2 = 40s (ts1m, until, 3), # Exp will increase to 4, 10*2**3 = 1m20s (ts5m, until, 1), # Exp will lower back to 0, 10*2**1 = 20s (ts4h, until, 9), # Exp will lower back to 0, 10*2**9 = 1h25m (ts4h, until, 10), # Exp will lower back to 9, 10*2**10 = 2h50m (ts4h, until, 11)] # Exp will increase to 12, 10*2**11 = 5h results = (1, 0, 2, 1, 4, 0, 0, 9, 12) next_res = iter(results) for args in test_values: ea.silence_cache[ea.rules[0]['name']] = (args[1], args[2]) next_alert, exponent = ea.next_alert_time(ea.rules[0], ea.rules[0]['name'], args[0]) assert exponent == next_res.next() def test_stop(ea): """ The purpose of this test is to make sure that calling ElastAlerter.stop() will break it out of a ElastAlerter.start() loop. This method exists to provide a mechanism for running ElastAlert with threads and thus must be tested with threads. mock_loop verifies the loop is running and will call stop after several iterations. 
""" # Exit the thread on the fourth iteration def mock_loop(): for i in range(3): assert ea.running yield ea.stop() with mock.patch.object(ea, 'sleep_for', return_value=None): with mock.patch.object(ea, 'run_all_rules') as mock_run: mock_run.side_effect = mock_loop() start_thread = threading.Thread(target=ea.start) # Set as daemon to prevent a failed test from blocking exit start_thread.daemon = True start_thread.start() # Give it a few seconds to run the loop start_thread.join(5) assert not ea.running assert not start_thread.is_alive() assert mock_run.call_count == 4 def test_notify_email(ea): mock_smtp = mock.Mock() ea.rules[0]['notify_email'] = ['foo@foo.foo', 'bar@bar.bar'] with mock.patch('elastalert.elastalert.SMTP') as mock_smtp_f: mock_smtp_f.return_value = mock_smtp # Notify_email from rules, array ea.send_notification_email('omg', rule=ea.rules[0]) assert set(mock_smtp.sendmail.call_args_list[0][0][1]) == set(ea.rules[0]['notify_email']) # With ea.notify_email ea.notify_email = ['baz@baz.baz'] ea.send_notification_email('omg', rule=ea.rules[0]) assert set(mock_smtp.sendmail.call_args_list[1][0][1]) == set(['baz@baz.baz'] + ea.rules[0]['notify_email']) # With ea.notify email but as single string ea.rules[0]['notify_email'] = 'foo@foo.foo' ea.send_notification_email('omg', rule=ea.rules[0]) assert set(mock_smtp.sendmail.call_args_list[2][0][1]) == set(['baz@baz.baz', 'foo@foo.foo']) # None from rule ea.rules[0].pop('notify_email') ea.send_notification_email('omg', rule=ea.rules[0]) assert set(mock_smtp.sendmail.call_args_list[3][0][1]) == set(['baz@baz.baz']) def test_uncaught_exceptions(ea): e = Exception("Errors yo!") # With disabling set to false ea.disable_rules_on_error = False ea.handle_uncaught_exception(e, ea.rules[0]) assert len(ea.rules) == 1 assert len(ea.disabled_rules) == 0 # With disabling set to true ea.disable_rules_on_error = True ea.handle_uncaught_exception(e, ea.rules[0]) assert len(ea.rules) == 0 assert len(ea.disabled_rules) == 1 # Changing the file should re-enable it ea.rule_hashes = {'rule1': 'abc'} new_hashes = {'rule1': 'def'} with mock.patch('elastalert.elastalert.get_rule_hashes') as mock_hashes: with mock.patch('elastalert.elastalert.load_configuration') as mock_load: mock_load.side_effect = [ea.disabled_rules[0]] mock_hashes.return_value = new_hashes ea.load_rule_changes() assert len(ea.rules) == 1 assert len(ea.disabled_rules) == 0 # Notify email is sent ea.notify_email = 'qlo@example.com' with mock.patch.object(ea, 'send_notification_email') as mock_email: ea.handle_uncaught_exception(e, ea.rules[0]) assert mock_email.call_args_list[0][1] == {'exception': e, 'rule': ea.disabled_rules[0]}
wsdump.py
#!/usr/bin/env python3
"""A small tool for testing websockets.

Required dependency: websocket-client
"""
import argparse
import code
import sys
import threading
import time
import ssl

import six
from six.moves.urllib.parse import urlparse

import websocket

try:
    import readline
except ImportError:
    pass


def get_encoding():
    encoding = getattr(sys.stdin, "encoding", "")
    if not encoding:
        return "utf-8"
    else:
        return encoding.lower()


OPCODE_DATA = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)
ENCODING = get_encoding()


class VAction(argparse.Action):

    def __call__(self, parser, args, values, option_string=None):
        if values is None:
            values = "1"
        try:
            values = int(values)
        except ValueError:
            values = values.count("v") + 1
        setattr(args, self.dest, values)


def parse_args():
    parser = argparse.ArgumentParser(description="WebSocket Simple Dump Tool")
    parser.add_argument("url", metavar="ws_url",
                        help="websocket url. ex. ws://echo.websocket.org/")
    parser.add_argument("-p", "--proxy",
                        help="proxy url. ex. http://127.0.0.1:8080")
    parser.add_argument("-v", "--verbose", default=0, nargs='?', action=VAction,
                        dest="verbose",
                        help="set verbose mode. If set to 1, show opcode. "
                             "If set to 2, enable to trace websocket module")
    parser.add_argument("-n", "--nocert", action='store_true',
                        help="Ignore invalid SSL cert")
    parser.add_argument("-r", "--raw", action="store_true",
                        help="raw output")
    parser.add_argument("-s", "--subprotocols", nargs='*',
                        help="Set subprotocols")
    parser.add_argument("-o", "--origin",
                        help="Set origin")
    parser.add_argument("--eof-wait", default=0, type=int,
                        help="wait time(second) after 'EOF' received.")
    parser.add_argument("-t", "--text",
                        help="Send initial text")
    parser.add_argument("--timings", action="store_true",
                        help="Print timings in seconds")
    parser.add_argument("--headers",
                        help="Set custom headers. Use ',' as separator")

    return parser.parse_args()


class RawInput:

    def raw_input(self, prompt):
        if six.PY3:
            line = input(prompt)
        else:
            line = raw_input(prompt)

        if ENCODING and ENCODING != "utf-8" and not isinstance(line, six.text_type):
            line = line.decode(ENCODING).encode("utf-8")
        elif isinstance(line, six.text_type):
            line = line.encode("utf-8")

        return line


class InteractiveConsole(RawInput, code.InteractiveConsole):

    def write(self, data):
        sys.stdout.write("\033[2K\033[E")
        # sys.stdout.write("\n")
        sys.stdout.write("\033[34m< " + data + "\033[39m")
        sys.stdout.write("\n> ")
        sys.stdout.flush()

    def read(self):
        return self.raw_input("> ")


class NonInteractive(RawInput):

    def write(self, data):
        sys.stdout.write(data)
        sys.stdout.write("\n")
        sys.stdout.flush()

    def read(self):
        return self.raw_input("")


def main():
    start_time = time.time()
    args = parse_args()
    if args.verbose > 1:
        websocket.enableTrace(True)
    options = {}
    if args.proxy:
        p = urlparse(args.proxy)
        options["http_proxy_host"] = p.hostname
        options["http_proxy_port"] = p.port
    if args.origin:
        options["origin"] = args.origin
    if args.subprotocols:
        options["subprotocols"] = args.subprotocols
    opts = {}
    if args.nocert:
        opts = {"cert_reqs": ssl.CERT_NONE, "check_hostname": False}
    if args.headers:
        # list() so websocket-client gets a real sequence, not a lazy map object
        options['header'] = list(map(str.strip, args.headers.split(',')))
    ws = websocket.create_connection(args.url, sslopt=opts, **options)
    if args.raw:
        console = NonInteractive()
    else:
        console = InteractiveConsole()
        print("Press Ctrl+C to quit")

    def recv():
        try:
            frame = ws.recv_frame()
        except websocket.WebSocketException:
            return websocket.ABNF.OPCODE_CLOSE, None
        if not frame:
            raise websocket.WebSocketException("Not a valid frame %s" % frame)
        elif frame.opcode in OPCODE_DATA:
            return frame.opcode, frame.data
        elif frame.opcode == websocket.ABNF.OPCODE_CLOSE:
            ws.send_close()
            return frame.opcode, None
        elif frame.opcode == websocket.ABNF.OPCODE_PING:
            ws.pong(frame.data)
            return frame.opcode, frame.data
        return frame.opcode, frame.data

    def recv_ws():
        while True:
            opcode, data = recv()
            msg = None
            if six.PY3 and opcode == websocket.ABNF.OPCODE_TEXT and isinstance(data, bytes):
                data = str(data, "utf-8")
            if not args.verbose and opcode in OPCODE_DATA:
                msg = data
            elif args.verbose:
                msg = "%s: %s" % (websocket.ABNF.OPCODE_MAP.get(opcode), data)

            if msg is not None:
                if args.timings:
                    console.write(str(time.time() - start_time) + ": " + msg)
                else:
                    console.write(msg)

            if opcode == websocket.ABNF.OPCODE_CLOSE:
                break

    thread = threading.Thread(target=recv_ws)
    thread.daemon = True
    thread.start()

    if args.text:
        ws.send(args.text)

    while True:
        try:
            message = console.read()
            ws.send(message)
        except KeyboardInterrupt:
            return
        except EOFError:
            time.sleep(args.eof_wait)
            return


if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        print(e)
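
# Example invocations (illustrative; assumes the file is saved as wsdump.py
# and websocket-client is installed; the echo server URL is a placeholder):
#
#   python wsdump.py ws://echo.websocket.org/
#   python wsdump.py ws://echo.websocket.org/ -t "hello" --timings
#   python wsdump.py wss://example.com/socket -n -vv -p http://127.0.0.1:8080
#
# Here -n skips SSL certificate verification, -vv additionally traces the
# websocket module, and -p tunnels the connection through an HTTP proxy,
# matching the argparse options defined above.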
mail.py
#!/usr/bin/env python
# -*- coding=UTF-8 -*-
# *************************************************************************
#   Copyright © 2015 JiangLin. All rights reserved.
#   File Name: mail.py
#   Author:JiangLin
#   Mail:mail@honmaple.com
#   Created Time: 2015-11-27 21:59:02
# *************************************************************************
from flask_mail import Mail as _Mail
from flask_mail import Message
from threading import Thread
from itsdangerous import (URLSafeTimedSerializer, BadSignature, SignatureExpired)
from flask import current_app
from .utils import gen_secret_key

mail = _Mail()


class Mail(object):
    def __init__(self, app=None):
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        self.app = app
        mail.init_app(app)

    def send_async_email(self, msg):
        with self.app.app_context():
            mail.send(msg)

    def send_email(self, *args, **kwargs):
        msg = Message(*args, **kwargs)
        thr = Thread(target=self.send_async_email, args=[msg])
        thr.start()


class MailMixin(object):
    @classmethod
    def _token_serializer(cls, key=None, salt=None):
        config = current_app.config
        if key is None:
            key = config.setdefault('SECRET_KEY', gen_secret_key(24))
        if salt is None:
            salt = config.setdefault('SECRET_KEY_SALT', gen_secret_key(24))
        return URLSafeTimedSerializer(key, salt=salt)

    @property
    def email_token(self):
        serializer = self._token_serializer()
        token = serializer.dumps(self.email)
        return token

    @classmethod
    def check_email_token(cls, token, max_age=259200):
        serializer = cls._token_serializer()
        try:
            email = serializer.loads(token, max_age=max_age)
        except BadSignature:
            return False
        except SignatureExpired:
            return False
        user = cls.query.filter_by(email=email).first()
        if user is None:
            return False
        return user

    # def send_email(self, *args, **kwargs):
    #     kwargs.update(recipients=[self.email])
    #     mail.send_email(*args, **kwargs)
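
# Minimal usage sketch (illustrative, not part of this module). The SMTP
# settings are placeholders; flask_mail reads them from app.config, and
# send_email dispatches on a background thread via send_async_email.
#
#     from flask import Flask
#
#     app = Flask(__name__)
#     app.config.update(MAIL_SERVER='smtp.example.com', SECRET_KEY='change-me')
#     mail_ext = Mail(app)
#     mail_ext.send_email('Subject', recipients=['user@example.com'], body='hi')
#
# For a model that mixes in MailMixin (and exposes an `email` column plus a
# SQLAlchemy `query` attribute), `user.email_token` produces a signed,
# timestamped token and `User.check_email_token(token)` returns the matching
# user or False; `User` here is a hypothetical model name.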
custom.py
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- # pylint: disable=too-few-public-methods,no-self-use,too-many-locals,line-too-long,unused-argument import errno try: import msvcrt from ._vt_helper import enable_vt_mode except ImportError: # Not supported for Linux machines. pass import os import platform import shlex import signal import sys import threading import time try: import termios import tty except ImportError: # Not supported for Windows machines. pass import websocket import yaml from knack.log import get_logger from knack.prompting import prompt_pass, prompt, NoTTYException from knack.util import CLIError from azure.mgmt.containerinstance.models import (AzureFileVolume, Container, ContainerGroup, ContainerGroupNetworkProtocol, ContainerPort, ImageRegistryCredential, IpAddress, Port, ResourceRequests, ResourceRequirements, Volume, VolumeMount, ContainerExecRequest, ContainerExecRequestTerminalSize, GitRepoVolume, LogAnalytics, ContainerGroupDiagnostics, ContainerGroupSubnetId, ContainerGroupIpAddressType, ResourceIdentityType, ContainerGroupIdentity) from azure.cli.core.util import sdk_no_wait from azure.cli.core.azclierror import RequiredArgumentMissingError from ._client_factory import (cf_container_groups, cf_container, cf_log_analytics_workspace, cf_log_analytics_workspace_shared_keys, cf_resource, cf_network, cf_msi) logger = get_logger(__name__) WINDOWS_NAME = 'Windows' SERVER_DELIMITER = '.' ACR_SERVER_DELIMITER = '.azurecr.io' AZURE_FILE_VOLUME_NAME = 'azurefile' SECRETS_VOLUME_NAME = 'secrets' GITREPO_VOLUME_NAME = 'gitrepo' MSI_LOCAL_ID = '[system]' def list_containers(client, resource_group_name=None): """List all container groups in a resource group. """ if resource_group_name is None: return client.list() return client.list_by_resource_group(resource_group_name) def get_container(client, resource_group_name, name): """Show details of a container group. """ return client.get(resource_group_name, name) def delete_container(client, resource_group_name, name, **kwargs): """Delete a container group. """ return client.begin_delete(resource_group_name, name) # pylint: disable=too-many-statements def create_container(cmd, resource_group_name, name=None, image=None, location=None, cpu=1, memory=1.5, restart_policy='Always', ports=None, protocol=None, os_type='Linux', ip_address=None, dns_name_label=None, command_line=None, environment_variables=None, secure_environment_variables=None, registry_login_server=None, registry_username=None, registry_password=None, azure_file_volume_share_name=None, azure_file_volume_account_name=None, azure_file_volume_account_key=None, azure_file_volume_mount_path=None, log_analytics_workspace=None, log_analytics_workspace_key=None, vnet=None, vnet_name=None, vnet_address_prefix='10.0.0.0/16', subnet=None, subnet_address_prefix='10.0.0.0/24', gitrepo_url=None, gitrepo_dir='.', gitrepo_revision=None, gitrepo_mount_path=None, secrets=None, secrets_mount_path=None, file=None, assign_identity=None, identity_scope=None, identity_role='Contributor', no_wait=False, acr_identity=None): """Create a container group. 
""" if file: return _create_update_from_file(cmd.cli_ctx, resource_group_name, name, location, file, no_wait) if not name: raise CLIError("error: the --name/-n argument is required unless specified with a passed in file.") if not image: raise CLIError("error: the --image argument is required unless specified with a passed in file.") ports = ports or [80] protocol = protocol or ContainerGroupNetworkProtocol.tcp container_resource_requirements = _create_resource_requirements(cpu=cpu, memory=memory) image_registry_credentials = _create_image_registry_credentials(cmd=cmd, resource_group_name=resource_group_name, registry_login_server=registry_login_server, registry_username=registry_username, registry_password=registry_password, image=image, identity=acr_identity) command = shlex.split(command_line) if command_line else None volumes = [] mounts = [] azure_file_volume = _create_azure_file_volume(azure_file_volume_share_name=azure_file_volume_share_name, azure_file_volume_account_name=azure_file_volume_account_name, azure_file_volume_account_key=azure_file_volume_account_key) azure_file_volume_mount = _create_azure_file_volume_mount(azure_file_volume=azure_file_volume, azure_file_volume_mount_path=azure_file_volume_mount_path) if azure_file_volume: volumes.append(azure_file_volume) mounts.append(azure_file_volume_mount) secrets_volume = _create_secrets_volume(secrets) secrets_volume_mount = _create_secrets_volume_mount(secrets_volume=secrets_volume, secrets_mount_path=secrets_mount_path) if secrets_volume: volumes.append(secrets_volume) mounts.append(secrets_volume_mount) diagnostics = None tags = {} if log_analytics_workspace and log_analytics_workspace_key: log_analytics = LogAnalytics( workspace_id=log_analytics_workspace, workspace_key=log_analytics_workspace_key) diagnostics = ContainerGroupDiagnostics( log_analytics=log_analytics ) elif log_analytics_workspace and not log_analytics_workspace_key: diagnostics, tags = _get_diagnostics_from_workspace( cmd.cli_ctx, log_analytics_workspace) if not diagnostics: raise CLIError('Log Analytics workspace "' + log_analytics_workspace + '" not found.') elif not log_analytics_workspace and log_analytics_workspace_key: raise CLIError('"--log-analytics-workspace-key" requires "--log-analytics-workspace".') gitrepo_volume = _create_gitrepo_volume(gitrepo_url=gitrepo_url, gitrepo_dir=gitrepo_dir, gitrepo_revision=gitrepo_revision) gitrepo_volume_mount = _create_gitrepo_volume_mount(gitrepo_volume=gitrepo_volume, gitrepo_mount_path=gitrepo_mount_path) if gitrepo_volume: volumes.append(gitrepo_volume) mounts.append(gitrepo_volume_mount) # Concatenate secure and standard environment variables if environment_variables and secure_environment_variables: environment_variables = environment_variables + secure_environment_variables else: environment_variables = environment_variables or secure_environment_variables identity = None if assign_identity is not None: identity = _build_identities_info(assign_identity) # Set up VNET and subnet if needed subnet_id = None cgroup_subnet = None if subnet: subnet_id = _get_subnet_id(cmd, location, resource_group_name, vnet, vnet_address_prefix, subnet, subnet_address_prefix) cgroup_subnet = [ContainerGroupSubnetId(id=subnet_id)] cgroup_ip_address = _create_ip_address(ip_address, ports, protocol, dns_name_label, subnet_id) container = Container(name=name, image=image, resources=container_resource_requirements, command=command, ports=[ContainerPort( port=p, protocol=protocol) for p in ports] if cgroup_ip_address else None, 
environment_variables=environment_variables, volume_mounts=mounts or None) cgroup = ContainerGroup(location=location, identity=identity, containers=[container], os_type=os_type, restart_policy=restart_policy, ip_address=cgroup_ip_address, image_registry_credentials=image_registry_credentials, volumes=volumes or None, subnet_ids=cgroup_subnet, diagnostics=diagnostics, tags=tags) container_group_client = cf_container_groups(cmd.cli_ctx) lro = sdk_no_wait(no_wait, container_group_client.begin_create_or_update, resource_group_name, name, cgroup) if assign_identity is not None and identity_scope: from azure.cli.core.commands.arm import assign_identity cg = container_group_client.get(resource_group_name, name) assign_identity(cmd.cli_ctx, lambda: cg, lambda cg: cg, identity_role, identity_scope) return lro def _build_identities_info(identities): identities = identities or [] identity_type = ResourceIdentityType.none if not identities or MSI_LOCAL_ID in identities: identity_type = ResourceIdentityType.system_assigned external_identities = [x for x in identities if x != MSI_LOCAL_ID] if external_identities and identity_type == ResourceIdentityType.system_assigned: identity_type = ResourceIdentityType.system_assigned_user_assigned elif external_identities: identity_type = ResourceIdentityType.user_assigned identity = ContainerGroupIdentity(type=identity_type) if external_identities: identity.user_assigned_identities = {e: {} for e in external_identities} return identity def _get_resource(client, resource_group_name, *subresources): from azure.core.exceptions import HttpResponseError try: resource = client.get(resource_group_name, *subresources) return resource except HttpResponseError as ex: if ex.error.code == "NotFound" or ex.error.code == "ResourceNotFound": return None raise def _get_subnet_id(cmd, location, resource_group_name, vnet, vnet_address_prefix, subnet, subnet_address_prefix): from azure.cli.core.profiles import ResourceType from msrestazure.tools import parse_resource_id, is_valid_resource_id aci_delegation_service_name = "Microsoft.ContainerInstance/containerGroups" Delegation = cmd.get_models('Delegation', resource_type=ResourceType.MGMT_NETWORK) aci_delegation = Delegation( name=aci_delegation_service_name, service_name=aci_delegation_service_name ) ncf = cf_network(cmd.cli_ctx) vnet_name = vnet subnet_name = subnet if is_valid_resource_id(subnet): parsed_subnet_id = parse_resource_id(subnet) subnet_name = parsed_subnet_id['resource_name'] vnet_name = parsed_subnet_id['name'] resource_group_name = parsed_subnet_id['resource_group'] elif is_valid_resource_id(vnet): parsed_vnet_id = parse_resource_id(vnet) vnet_name = parsed_vnet_id['resource_name'] resource_group_name = parsed_vnet_id['resource_group'] subnet = _get_resource(ncf.subnets, resource_group_name, vnet_name, subnet_name) # For an existing subnet, validate and add delegation if needed if subnet: logger.info('Using existing subnet "%s" in resource group "%s"', subnet.name, resource_group_name) for sal in (subnet.service_association_links or []): if sal.linked_resource_type != aci_delegation_service_name: raise CLIError("Can not use subnet with existing service association links other than {}.".format(aci_delegation_service_name)) if not subnet.delegations: logger.info('Adding ACI delegation to the existing subnet.') subnet.delegations = [aci_delegation] subnet = ncf.subnets.begin_create_or_update(resource_group_name, vnet_name, subnet_name, subnet).result() else: for delegation in subnet.delegations: if 
delegation.service_name != aci_delegation_service_name: raise CLIError("Can not use subnet with existing delegations other than {}".format(aci_delegation_service_name)) # Create new subnet and Vnet if not exists else: Subnet, VirtualNetwork, AddressSpace = cmd.get_models('Subnet', 'VirtualNetwork', 'AddressSpace', resource_type=ResourceType.MGMT_NETWORK) vnet = _get_resource(ncf.virtual_networks, resource_group_name, vnet_name) if not vnet: logger.info('Creating new vnet "%s" in resource group "%s"', vnet_name, resource_group_name) ncf.virtual_networks.begin_create_or_update(resource_group_name, vnet_name, VirtualNetwork(name=vnet_name, location=location, address_space=AddressSpace(address_prefixes=[vnet_address_prefix]))) subnet = Subnet( name=subnet_name, location=location, address_prefix=subnet_address_prefix, delegations=[aci_delegation]) logger.info('Creating new subnet "%s" in resource group "%s"', subnet_name, resource_group_name) subnet = ncf.subnets.begin_create_or_update(resource_group_name, vnet_name, subnet_name, subnet).result() return subnet.id def _get_diagnostics_from_workspace(cli_ctx, log_analytics_workspace): from msrestazure.tools import parse_resource_id log_analytics_workspace_client = cf_log_analytics_workspace(cli_ctx) log_analytics_workspace_shared_keys_client = cf_log_analytics_workspace_shared_keys(cli_ctx) for workspace in log_analytics_workspace_client.list(): if log_analytics_workspace in (workspace.name, workspace.customer_id): keys = log_analytics_workspace_shared_keys_client.get_shared_keys( parse_resource_id(workspace.id)['resource_group'], workspace.name) log_analytics = LogAnalytics( workspace_id=workspace.customer_id, workspace_key=keys.primary_shared_key) diagnostics = ContainerGroupDiagnostics( log_analytics=log_analytics) return (diagnostics, {'oms-resource-link': workspace.id}) return None, {} # pylint: disable=unsupported-assignment-operation,protected-access def _create_update_from_file(cli_ctx, resource_group_name, name, location, file, no_wait): resource_client = cf_resource(cli_ctx) container_group_client = cf_container_groups(cli_ctx) cg_defintion = None try: with open(file, 'r') as f: cg_defintion = yaml.safe_load(f) except OSError: # FileNotFoundError introduced in Python 3 raise CLIError("No such file or directory: " + file) except yaml.YAMLError as e: raise CLIError("Error while parsing yaml file:\n\n" + str(e)) # Validate names match if both are provided if name and cg_defintion.get('name', None): if name != cg_defintion.get('name', None): raise CLIError("The name parameter and name from yaml definition must match.") else: # Validate at least one name is provided name = name or cg_defintion.get('name', None) if cg_defintion.get('name', None) is None and not name: raise CLIError("The name of the container group is required") cg_defintion['name'] = name if cg_defintion.get('location'): location = cg_defintion.get('location') cg_defintion['location'] = location api_version = cg_defintion.get('apiVersion', None) or container_group_client._config.api_version return sdk_no_wait(no_wait, resource_client.resources.begin_create_or_update, resource_group_name, "Microsoft.ContainerInstance", '', "containerGroups", name, api_version, cg_defintion) # pylint: disable=inconsistent-return-statements def _create_resource_requirements(cpu, memory): """Create resource requirements. 
""" if cpu or memory: container_resource_requests = ResourceRequests(memory_in_gb=memory, cpu=cpu) return ResourceRequirements(requests=container_resource_requests) def _create_image_registry_credentials(cmd, resource_group_name, registry_login_server, registry_username, registry_password, image, identity): from msrestazure.tools import is_valid_resource_id image_registry_credentials = None if registry_login_server: if not registry_username: raise RequiredArgumentMissingError('Please specify --registry-username in order to use custom image registry.') if not registry_password: try: registry_password = prompt_pass(msg='Image registry password: ') except NoTTYException: raise RequiredArgumentMissingError('Please specify --registry-password in order to use custom image registry.') image_registry_credentials = [ImageRegistryCredential(server=registry_login_server, username=registry_username, password=registry_password)] elif ACR_SERVER_DELIMITER in image.split("/")[0]: acr_server = image.split("/")[0] if image.split("/") else None if identity: if not is_valid_resource_id(identity): msi_client = cf_msi(cmd.cli_ctx) identity = msi_client.user_assigned_identities.get(resource_group_name=resource_group_name, resource_name=identity).id if acr_server: image_registry_credentials = [ImageRegistryCredential(server=acr_server, username=registry_username, password=registry_password, identity=identity)] else: if not registry_username: try: registry_username = prompt(msg='Image registry username: ') except NoTTYException: raise RequiredArgumentMissingError('Please specify --registry-username in order to use Azure Container Registry.') if not registry_password: try: registry_password = prompt_pass(msg='Image registry password: ') except NoTTYException: raise RequiredArgumentMissingError('Please specify --registry-password in order to use Azure Container Registry.') if acr_server: image_registry_credentials = [ImageRegistryCredential(server=acr_server, username=registry_username, password=registry_password)] elif registry_username and registry_password and SERVER_DELIMITER in image.split("/")[0]: login_server = image.split("/")[0] if image.split("/") else None if login_server: image_registry_credentials = [ImageRegistryCredential(server=login_server, username=registry_username, password=registry_password)] else: raise RequiredArgumentMissingError('Failed to parse login server from image name; please explicitly specify --registry-server.') return image_registry_credentials def _create_azure_file_volume(azure_file_volume_share_name, azure_file_volume_account_name, azure_file_volume_account_key): """Create Azure File volume. """ azure_file_volume = None if azure_file_volume_share_name: if not azure_file_volume_account_name: raise CLIError('Please specify --azure-file-volume-account-name in order to use Azure File volume.') if not azure_file_volume_account_key: try: azure_file_volume_account_key = prompt_pass(msg='Azure File storage account key: ') except NoTTYException: raise CLIError('Please specify --azure-file-volume-account-key in order to use Azure File volume.') azure_file_volume = AzureFileVolume(share_name=azure_file_volume_share_name, storage_account_name=azure_file_volume_account_name, storage_account_key=azure_file_volume_account_key) return Volume(name=AZURE_FILE_VOLUME_NAME, azure_file=azure_file_volume) if azure_file_volume else None def _create_secrets_volume(secrets): """Create secrets volume. 
""" return Volume(name=SECRETS_VOLUME_NAME, secret=secrets) if secrets else None def _create_gitrepo_volume(gitrepo_url, gitrepo_dir, gitrepo_revision): """Create Git Repo volume. """ gitrepo_volume = GitRepoVolume(repository=gitrepo_url, directory=gitrepo_dir, revision=gitrepo_revision) return Volume(name=GITREPO_VOLUME_NAME, git_repo=gitrepo_volume) if gitrepo_url else None # pylint: disable=inconsistent-return-statements def _create_azure_file_volume_mount(azure_file_volume, azure_file_volume_mount_path): """Create Azure File volume mount. """ if azure_file_volume_mount_path: if not azure_file_volume: raise CLIError('Please specify --azure-file-volume-share-name --azure-file-volume-account-name --azure-file-volume-account-key ' 'to enable Azure File volume mount.') return VolumeMount(name=AZURE_FILE_VOLUME_NAME, mount_path=azure_file_volume_mount_path) def _create_secrets_volume_mount(secrets_volume, secrets_mount_path): """Create secrets volume mount. """ if secrets_volume: if not secrets_mount_path: raise CLIError('Please specify --secrets --secrets-mount-path ' 'to enable secrets volume mount.') return VolumeMount(name=SECRETS_VOLUME_NAME, mount_path=secrets_mount_path) def _create_gitrepo_volume_mount(gitrepo_volume, gitrepo_mount_path): """Create Git Repo volume mount. """ if gitrepo_mount_path: if not gitrepo_volume: raise CLIError('Please specify --gitrepo-url (--gitrepo-dir --gitrepo-revision) ' 'to enable Git Repo volume mount.') return VolumeMount(name=GITREPO_VOLUME_NAME, mount_path=gitrepo_mount_path) # pylint: disable=inconsistent-return-statements def _create_ip_address(ip_address, ports, protocol, dns_name_label, subnet_id): """Create IP address. """ if (ip_address and ip_address.lower() == 'public') or dns_name_label: return IpAddress(ports=[Port(protocol=protocol, port=p) for p in ports], dns_name_label=dns_name_label, type=ContainerGroupIpAddressType.public) if subnet_id: return IpAddress(ports=[Port(protocol=protocol, port=p) for p in ports], type=ContainerGroupIpAddressType.private) # pylint: disable=inconsistent-return-statements def container_logs(cmd, resource_group_name, name, container_name=None, follow=False): """Tail a container instance log. """ container_client = cf_container(cmd.cli_ctx) container_group_client = cf_container_groups(cmd.cli_ctx) container_group = container_group_client.get(resource_group_name, name) # If container name is not present, use the first container. 
    if container_name is None:
        container_name = container_group.containers[0].name

    if not follow:
        log = container_client.list_logs(resource_group_name, name, container_name)
        print(log.content)
    else:
        _start_streaming(
            terminate_condition=_is_container_terminated,
            terminate_condition_args=(container_group_client,
                                      resource_group_name,
                                      name,
                                      container_name),
            shutdown_grace_period=5,
            stream_target=_stream_logs,
            stream_args=(container_client,
                         resource_group_name,
                         name,
                         container_name,
                         container_group.restart_policy))


# pylint: disable=protected-access
def container_export(cmd, resource_group_name, name, file):
    resource_client = cf_resource(cmd.cli_ctx)
    container_group_client = cf_container_groups(cmd.cli_ctx)
    resource = resource_client.resources.get(resource_group_name,
                                             "Microsoft.ContainerInstance",
                                             '',
                                             "containerGroups",
                                             name,
                                             container_group_client._config.api_version).__dict__

    # Remove unwanted properties
    resource['properties'].pop('instanceView', None)
    resource.pop('sku', None)
    resource.pop('id', None)
    resource.pop('plan', None)
    resource.pop('kind', None)
    resource.pop('managed_by', None)
    resource['properties'].pop('provisioningState', None)

    # Correctly export the identity
    try:
        identity = resource['identity'].type
        if identity != ResourceIdentityType.none:
            resource['identity'] = resource['identity'].__dict__
            identity_entry = {'type': resource['identity']['type'].value}
            if resource['identity']['user_assigned_identities']:
                identity_entry['user_assigned_identities'] = {k: {} for k in resource['identity']['user_assigned_identities']}
            resource['identity'] = identity_entry
    except (KeyError, AttributeError):
        resource.pop('identity', None)

    # Remove container instance views
    for i in range(len(resource['properties']['containers'])):
        resource['properties']['containers'][i]['properties'].pop('instanceView', None)

    # Add the api version
    resource['apiVersion'] = container_group_client._config.api_version

    with open(file, 'w+') as f:
        yaml.safe_dump(resource, f, default_flow_style=False)


def container_exec(cmd, resource_group_name, name, exec_command, container_name=None):
    """Start exec for a container. """
    container_client = cf_container(cmd.cli_ctx)
    container_group_client = cf_container_groups(cmd.cli_ctx)
    container_group = container_group_client.get(resource_group_name, name)

    if container_name or container_name is None and len(container_group.containers) == 1:
        # If only one container in container group, use that container.
        if container_name is None:
            container_name = container_group.containers[0].name

        try:
            terminalsize = os.get_terminal_size()
        except OSError:
            terminalsize = os.terminal_size((80, 24))
        terminal_size = ContainerExecRequestTerminalSize(rows=terminalsize.lines, cols=terminalsize.columns)

        exec_request = ContainerExecRequest(command=exec_command, terminal_size=terminal_size)

        execContainerResponse = container_client.execute_command(resource_group_name, name, container_name, exec_request)

        if platform.system() == WINDOWS_NAME:
            _start_exec_pipe_windows(execContainerResponse.web_socket_uri, execContainerResponse.password)
        else:
            _start_exec_pipe_linux(execContainerResponse.web_socket_uri, execContainerResponse.password)

    else:
        raise CLIError('--container-name required when container group has more than one container.')


def _start_exec_pipe_windows(web_socket_uri, password):
    import colorama
    colorama.deinit()
    enable_vt_mode()
    buff = bytearray()
    lock = threading.Lock()

    def _on_ws_open_windows(ws):
        ws.send(password)
        readKeyboard = threading.Thread(target=_capture_stdin, args=[_getch_windows, buff, lock], daemon=True)
        readKeyboard.start()
        flushKeyboard = threading.Thread(target=_flush_stdin, args=[ws, buff, lock], daemon=True)
        flushKeyboard.start()

    ws = websocket.WebSocketApp(web_socket_uri, on_open=_on_ws_open_windows, on_message=_on_ws_msg)

    # in windows, msvcrt.getch doesn't give us ctrl+C so we have to manually catch it
    # with kb interrupt and send it over the socket
    websocketRun = threading.Thread(target=ws.run_forever)
    websocketRun.start()

    while websocketRun.is_alive():
        try:
            time.sleep(0.01)
        except KeyboardInterrupt:
            try:
                ws.send(b'\x03')  # CTRL-C character (ETX character)
            finally:
                pass

    colorama.reinit()


def _start_exec_pipe_linux(web_socket_uri, password):
    stdin_fd = sys.stdin.fileno()
    old_tty = termios.tcgetattr(stdin_fd)
    old_winch_handler = signal.getsignal(signal.SIGWINCH)
    tty.setraw(stdin_fd)
    tty.setcbreak(stdin_fd)
    buff = bytearray()
    lock = threading.Lock()

    def _on_ws_open_linux(ws):
        ws.send(password)
        readKeyboard = threading.Thread(target=_capture_stdin, args=[_getch_linux, buff, lock], daemon=True)
        readKeyboard.start()
        flushKeyboard = threading.Thread(target=_flush_stdin, args=[ws, buff, lock], daemon=True)
        flushKeyboard.start()

    ws = websocket.WebSocketApp(web_socket_uri, on_open=_on_ws_open_linux, on_message=_on_ws_msg)
    ws.run_forever()
    termios.tcsetattr(stdin_fd, termios.TCSADRAIN, old_tty)
    signal.signal(signal.SIGWINCH, old_winch_handler)


def _on_ws_msg(ws, msg):
    if isinstance(msg, str):
        msg = msg.encode()
    sys.stdout.buffer.write(msg)
    sys.stdout.flush()


def _capture_stdin(getch_func, buff, lock):
    # this method will fill up the buffer from one thread (using the lock)
    while True:
        try:
            x = getch_func()
            lock.acquire()
            buff.extend(x)
            lock.release()
        finally:
            if lock.locked():
                lock.release()


def _flush_stdin(ws, buff, lock):
    # this method will flush the buffer out to the websocket (using the lock)
    while True:
        time.sleep(0.01)
        try:
            if not buff:
                continue
            lock.acquire()
            x = bytes(buff)
            buff.clear()
            lock.release()
            ws.send(x, opcode=0x2)  # OPCODE_BINARY = 0x2
        except (OSError, IOError, websocket.WebSocketConnectionClosedException) as e:
            if isinstance(e, websocket.WebSocketConnectionClosedException):
                pass
            elif e.errno == 9:
                # [Errno 9] Bad file descriptor
                pass
            elif e.args and e.args[0] == errno.EINTR:
                pass
            else:
                raise
        finally:
            if lock.locked():
                lock.release()


def _getch_windows():
    while not msvcrt.kbhit():
        time.sleep(0.01)
    return msvcrt.getch()


def _getch_linux():
    ch = sys.stdin.read(1)
    return ch.encode()


def attach_to_container(cmd, resource_group_name, name, container_name=None):
    """Attach to a container. """
    container_client = cf_container(cmd.cli_ctx)
    container_group_client = cf_container_groups(cmd.cli_ctx)
    container_group = container_group_client.get(resource_group_name, name)

    # If container name is not present, use the first container.
    if container_name is None:
        container_name = container_group.containers[0].name

    _start_streaming(
        terminate_condition=_is_container_terminated,
        terminate_condition_args=(container_group_client,
                                  resource_group_name,
                                  name,
                                  container_name),
        shutdown_grace_period=5,
        stream_target=_stream_container_events_and_logs,
        stream_args=(container_group_client,
                     container_client,
                     resource_group_name,
                     name,
                     container_name))


def _start_streaming(terminate_condition, terminate_condition_args, shutdown_grace_period, stream_target, stream_args):
    """Start streaming for the stream target. """
    import colorama
    colorama.init()

    try:
        t = threading.Thread(target=stream_target, args=stream_args)
        t.daemon = True
        t.start()

        while not terminate_condition(*terminate_condition_args) and t.is_alive():
            time.sleep(10)

        time.sleep(shutdown_grace_period)
    finally:
        colorama.deinit()


def _stream_logs(client, resource_group_name, name, container_name, restart_policy):
    """Stream logs for a container. """
    lastOutputLines = 0
    while True:
        log = client.list_logs(resource_group_name, name, container_name)
        lines = log.content.split('\n')
        currentOutputLines = len(lines)

        # Should only happen when the container restarts.
        if currentOutputLines < lastOutputLines and restart_policy != 'Never':
            print("Warning: you're using '--restart-policy={}'; the container '{}' was just restarted; "
                  "the tail of the current log might be missing. Exiting...".format(restart_policy, container_name))
            break

        _move_console_cursor_up(lastOutputLines)
        print(log.content)

        lastOutputLines = currentOutputLines
        time.sleep(2)


def _stream_container_events_and_logs(container_group_client, container_client, resource_group_name, name, container_name):
    """Stream container events and logs. """
    lastOutputLines = 0
    lastContainerState = None

    while True:
        container_group, container = _find_container(container_group_client, resource_group_name, name, container_name)

        container_state = 'Unknown'
        if container.instance_view and container.instance_view.current_state and container.instance_view.current_state.state:
            container_state = container.instance_view.current_state.state

        _move_console_cursor_up(lastOutputLines)
        if container_state != lastContainerState:
            print("Container '{}' is in state '{}'...".format(container_name, container_state))

        currentOutputLines = 0
        if container.instance_view and container.instance_view.events:
            for event in sorted(container.instance_view.events, key=lambda e: e.last_timestamp):
                print('(count: {}) (last timestamp: {}) {}'.format(event.count, event.last_timestamp, event.message))
                currentOutputLines += 1

        lastOutputLines = currentOutputLines
        lastContainerState = container_state

        if container_state == 'Running':
            print('\nStart streaming logs:')
            break

        time.sleep(2)

    _stream_logs(container_client, resource_group_name, name, container_name, container_group.restart_policy)


def _is_container_terminated(client, resource_group_name, name, container_name):
    """Check if a container should be considered terminated. """
    container_group, container = _find_container(client, resource_group_name, name, container_name)

    # If a container group is terminated, assume the container is also terminated.
    if container_group.instance_view and container_group.instance_view.state:
        if container_group.instance_view.state == 'Succeeded' or container_group.instance_view.state == 'Failed':
            return True

    # If the restart policy is Always, assume the container will be restarted.
    if container_group.restart_policy:
        if container_group.restart_policy == 'Always':
            return False

    # Only assume the container is terminated if its state is Terminated.
    if container.instance_view and container.instance_view.current_state and container.instance_view.current_state.state == 'Terminated':
        return True

    return False


def _find_container(client, resource_group_name, name, container_name):
    """Find a container in a container group. """
    container_group = client.get(resource_group_name, name)
    containers = [c for c in container_group.containers if c.name == container_name]

    if len(containers) != 1:
        raise CLIError("Found no container or multiple containers with name '{}'".format(container_name))

    return container_group, containers[0]


def _move_console_cursor_up(lines):
    """Move console cursor up. """
    if lines > 0:
        # Use stdout.write to support Python 2
        sys.stdout.write('\033[{}A\033[K\033[J'.format(lines))


def _gen_guid():
    import uuid
    return uuid.uuid4()
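
# Illustrative sketch (not part of this module): a minimal YAML definition of
# the kind that `az container create --file` hands to _create_update_from_file
# above. All values are placeholders and the schema is only sketched here,
# not exhaustive; consult the containerGroups ARM schema for the full shape.
#
#     name: mycontainergroup
#     location: westus
#     apiVersion: 2021-09-01    # optional; defaults to the client's api version
#     properties:
#       osType: Linux
#       restartPolicy: Always
#       containers:
#       - name: app
#         properties:
#           image: mcr.microsoft.com/azuredocs/aci-helloworld
#           resources:
#             requests:
#               cpu: 1.0
#               memoryInGB: 1.5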
mtg_goldfish_parser.py
from typing import Collection

from bs4 import BeautifulSoup
import requests
import re
from .deck import Deck
from threading import Thread


class Goldfish_Parser():

    def __init__(self, my_collection):
        self.decks = []
        self.my_coll = my_collection

    def get_mtg_goldfish_deck_links(self):
        page = requests.get("https://www.mtggoldfish.com/metagame/standard/full#paper")
        soup = BeautifulSoup(page.text, 'html.parser')
        threads = []
        for chunk in soup.find_all(class_='archetype-tile'):
            thread = Thread(target=self.get_single_deck, args=(chunk,))
            threads.append(thread)
            thread.start()
        for thread in threads:
            thread.join()
        threads = []
        for deck in self.decks:
            thread = Thread(target=deck.check_owned_cards, args=(self.my_coll,))
            threads.append(thread)
            thread.start()
        for thread in threads:
            thread.join()
        self.remove_dup_decks()

    def get_single_deck(self, chunk):
        curr_deck = chunk.find('a')
        statistics = chunk.find(class_='archetype-tile-statistic-value').text
        deck_name = curr_deck.find(class_="sr-only").text
        meta_percentage_unformated = statistics if "%" in statistics else "N/A"
        meta_percentage = meta_percentage_unformated.strip('\n').split('\n')[0]
        deck_link = curr_deck.get("href")
        if "/archetype/standard-other-eld" not in deck_link:
            deck_cards = self.get_mtg_goldfish_deck_cards(deck_link)
            new_deck = Deck(meta_percentage, deck_name, deck_cards)
            self.decks.append(new_deck)
            new_deck.check_owned_cards(self.my_coll)

    def get_mtg_goldfish_deck_cards(self, deck_link):
        cards = {}
        if "archetype" in deck_link:
            page = requests.get('https://www.mtggoldfish.com/' + str(deck_link))
            soup = BeautifulSoup(page.text, 'html.parser')
            blob = soup.find('a', text=re.compile(r'^Text File \(Default\)$'), attrs={'class': 'dropdown-item'})
            deck_link = blob.get("href")
        if "download" not in deck_link:
            lnk = deck_link.split("/", 3)
            deck_link = "/" + lnk[1] + "/download/" + lnk[2]
        txt_of_deck = requests.get("https://www.mtggoldfish.com" + str(deck_link)).text
        for card in txt_of_deck.split("\r"):
            if not card.isspace():
                lst = card.split(" ", 1)
                card_name = lst[1]
                card_count = lst[0].split("\n")[1] if "\n" in lst[0] else lst[0]
                cards[card_name] = card_count
        return cards

    def remove_dup_decks(self):
        pruned_decks = []
        seen_decks = []
        for deck in self.decks:
            if deck.name not in seen_decks:
                pruned_decks.append(deck)
                seen_decks.append(deck.name)
        # Persist the de-duplicated list; previously the pruned list was only
        # returned and the caller ignored the result, so duplicates were never
        # actually dropped.
        self.decks = pruned_decks
        return pruned_decks

    def get_goldfish_decks(self):
        return self.decks
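
# Minimal usage sketch (illustrative): `my_collection` stands in for whatever
# owned-card mapping Deck.check_owned_cards expects, and the printed attribute
# assumes Deck exposes at least `name` (which remove_dup_decks relies on).
if __name__ == '__main__':
    my_collection = {'Island': 20, 'Opt': 4}  # hypothetical owned-card counts
    parser = Goldfish_Parser(my_collection)
    parser.get_mtg_goldfish_deck_links()  # scrapes the metagame page
    for deck in parser.get_goldfish_decks():
        print(deck.name)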
test_client.py
import asyncio from collections import deque from contextlib import suppress from functools import partial import gc import logging from operator import add import os import pickle import psutil import random import subprocess import sys import threading from threading import Semaphore from time import sleep import traceback import warnings import weakref import zipfile import pytest from tlz import identity, isdistinct, concat, pluck, valmap, first, merge import dask from dask import delayed from dask.optimization import SubgraphCallable import dask.bag as db from distributed import ( Worker, Nanny, fire_and_forget, LocalCluster, get_client, secede, get_worker, Executor, profile, performance_report, TimeoutError, CancelledError, ) from distributed.core import Status from distributed.comm import CommClosedError from distributed.client import ( Client, Future, wait, as_completed, tokenize, _get_global_client, default_client, futures_of, temp_default_client, ) from distributed.compatibility import WINDOWS from distributed.metrics import time from distributed.scheduler import Scheduler, KilledWorker from distributed.sizeof import sizeof from distributed.utils import mp_context, sync, tmp_text, tokey, tmpfile, is_valid_xml from distributed.utils_test import ( cluster, slowinc, slowadd, slowdec, randominc, inc, dec, div, throws, geninc, asyncinc, gen_cluster, gen_test, double, popen, captured_logger, varying, map_varying, wait_for, async_wait_for, pristine_loop, save_sys_modules, ) from distributed.utils_test import ( # noqa: F401 client as c, client_secondary as c2, cleanup, cluster_fixture, loop, loop_in_thread, nodebug, s, a, b, ) @gen_cluster(client=True, timeout=None) async def test_submit(c, s, a, b): x = c.submit(inc, 10) assert not x.done() assert isinstance(x, Future) assert x.client is c result = await x assert result == 11 assert x.done() y = c.submit(inc, 20) z = c.submit(add, x, y) result = await z assert result == 11 + 21 s.validate_state() @gen_cluster(client=True) async def test_map(c, s, a, b): L1 = c.map(inc, range(5)) assert len(L1) == 5 assert isdistinct(x.key for x in L1) assert all(isinstance(x, Future) for x in L1) result = await L1[0] assert result == inc(0) assert len(s.tasks) == 5 L2 = c.map(inc, L1) result = await L2[1] assert result == inc(inc(1)) assert len(s.tasks) == 10 # assert L1[0].key in s.tasks[L2[0].key] total = c.submit(sum, L2) result = await total assert result == sum(map(inc, map(inc, range(5)))) L3 = c.map(add, L1, L2) result = await L3[1] assert result == inc(1) + inc(inc(1)) L4 = c.map(add, range(3), range(4)) results = await c.gather(L4) assert results == list(map(add, range(3), range(4))) def f(x, y=10): return x + y L5 = c.map(f, range(5), y=5) results = await c.gather(L5) assert results == list(range(5, 10)) y = c.submit(f, 10) L6 = c.map(f, range(5), y=y) results = await c.gather(L6) assert results == list(range(20, 25)) s.validate_state() @gen_cluster(client=True) async def test_map_empty(c, s, a, b): L1 = c.map(inc, [], pure=False) assert len(L1) == 0 results = await c.gather(L1) assert results == [] @gen_cluster(client=True) async def test_map_keynames(c, s, a, b): futures = c.map(inc, range(4), key="INC") assert all(f.key.startswith("INC") for f in futures) assert isdistinct(f.key for f in futures) futures2 = c.map(inc, [5, 6, 7, 8], key="INC") assert [f.key for f in futures] != [f.key for f in futures2] keys = ["inc-1", "inc-2", "inc-3", "inc-4"] futures = c.map(inc, range(4), key=keys) assert [f.key for f in futures] == keys 
@gen_cluster(client=True)
async def test_map_retries(c, s, a, b):
    args = [
        [ZeroDivisionError("one"), 2, 3],
        [4, 5, 6],
        [ZeroDivisionError("seven"), ZeroDivisionError("eight"), 9],
    ]

    x, y, z = c.map(*map_varying(args), retries=2)
    assert await x == 2
    assert await y == 4
    assert await z == 9

    x, y, z = c.map(*map_varying(args), retries=1, pure=False)
    assert await x == 2
    assert await y == 4
    with pytest.raises(ZeroDivisionError, match="eight"):
        await z

    x, y, z = c.map(*map_varying(args), retries=0, pure=False)
    with pytest.raises(ZeroDivisionError, match="one"):
        await x
    assert await y == 4
    with pytest.raises(ZeroDivisionError, match="seven"):
        await z


@gen_cluster(client=True)
async def test_map_batch_size(c, s, a, b):
    result = c.map(inc, range(100), batch_size=10)
    result = await c.gather(result)
    assert result == list(range(1, 101))

    result = c.map(add, range(100), range(100), batch_size=10)
    result = await c.gather(result)
    assert result == list(range(0, 200, 2))

    # mismatch shape
    result = c.map(add, range(100, 200), range(10), batch_size=2)
    result = await c.gather(result)
    assert result == list(range(100, 120, 2))


@gen_cluster(client=True)
async def test_compute_retries(c, s, a, b):
    args = [ZeroDivisionError("one"), ZeroDivisionError("two"), 3]

    # Sanity check for varying() use
    x = c.compute(delayed(varying(args))())
    with pytest.raises(ZeroDivisionError, match="one"):
        await x

    # Same retries for all
    x = c.compute(delayed(varying(args))(), retries=1)
    with pytest.raises(ZeroDivisionError, match="two"):
        await x

    x = c.compute(delayed(varying(args))(), retries=2)
    assert await x == 3

    args.append(4)
    x = c.compute(delayed(varying(args))(), retries=2)
    assert await x == 3

    # Per-future retries
    xargs = [ZeroDivisionError("one"), ZeroDivisionError("two"), 30, 40]
    yargs = [ZeroDivisionError("five"), ZeroDivisionError("six"), 70]
    zargs = [80, 90, 100]

    x, y = [delayed(varying(args))() for args in (xargs, yargs)]
    x, y = c.compute([x, y], retries={x: 2})
    gc.collect()

    assert await x == 30
    with pytest.raises(ZeroDivisionError, match="five"):
        await y

    x, y, z = [delayed(varying(args))() for args in (xargs, yargs, zargs)]
    x, y, z = c.compute([x, y, z], retries={(y, z): 2})

    with pytest.raises(ZeroDivisionError, match="one"):
        await x
    assert await y == 70
    assert await z == 80


def test_retries_get(c):
    args = [ZeroDivisionError("one"), ZeroDivisionError("two"), 3]
    x = delayed(varying(args))()
    assert x.compute(retries=5) == 3

    args = [ZeroDivisionError("one"), ZeroDivisionError("two"), 3]
    x = delayed(varying(args))()
    with pytest.raises(ZeroDivisionError):
        x.compute()


@gen_cluster(client=True)
async def test_compute_persisted_retries(c, s, a, b):
    args = [ZeroDivisionError("one"), ZeroDivisionError("two"), 3]

    # Sanity check
    x = c.persist(delayed(varying(args))())
    fut = c.compute(x)
    with pytest.raises(ZeroDivisionError, match="one"):
        await fut

    x = c.persist(delayed(varying(args))())
    fut = c.compute(x, retries=1)
    with pytest.raises(ZeroDivisionError, match="two"):
        await fut

    x = c.persist(delayed(varying(args))())
    fut = c.compute(x, retries=2)
    assert await fut == 3

    args.append(4)
    x = c.persist(delayed(varying(args))())
    fut = c.compute(x, retries=3)
    assert await fut == 3


@gen_cluster(client=True)
async def test_persist_retries(c, s, a, b):
    # Same retries for all
    args = [ZeroDivisionError("one"), ZeroDivisionError("two"), 3]

    x = c.persist(delayed(varying(args))(), retries=1)
    x = c.compute(x)
    with pytest.raises(ZeroDivisionError, match="two"):
        await x

    x = c.persist(delayed(varying(args))(), retries=2)
    x = c.compute(x)
    assert await x == 3
    # Per-key retries
    xargs = [ZeroDivisionError("one"), ZeroDivisionError("two"), 30, 40]
    yargs = [ZeroDivisionError("five"), ZeroDivisionError("six"), 70]
    zargs = [80, 90, 100]

    x, y, z = [delayed(varying(args))() for args in (xargs, yargs, zargs)]
    x, y, z = c.persist([x, y, z], retries={(y, z): 2})
    x, y, z = c.compute([x, y, z])

    with pytest.raises(ZeroDivisionError, match="one"):
        await x
    assert await y == 70
    assert await z == 80


@gen_cluster(client=True)
async def test_retries_dask_array(c, s, a, b):
    da = pytest.importorskip("dask.array")
    x = da.ones((10, 10), chunks=(3, 3))
    future = c.compute(x.sum(), retries=2)
    y = await future
    assert y == 100


@gen_cluster(client=True)
async def test_future_repr(c, s, a, b):
    pd = pytest.importorskip("pandas")
    x = c.submit(inc, 10)
    y = c.submit(pd.DataFrame, {"x": [1, 2, 3]})
    await x
    await y

    for func in [repr, lambda x: x._repr_html_()]:
        assert str(x.key) in func(x)
        assert str(x.status) in func(x)
        assert str(x.status) in repr(c.futures[x.key])
        assert "int" in func(x)
        assert "pandas" in func(y)
        assert "DataFrame" in func(y)


@gen_cluster(client=True)
async def test_future_tuple_repr(c, s, a, b):
    da = pytest.importorskip("dask.array")
    y = da.arange(10, chunks=(5,)).persist()
    f = futures_of(y)[0]
    for func in [repr, lambda x: x._repr_html_()]:
        for k in f.key:
            assert str(k) in func(f)


@gen_cluster(client=True)
async def test_Future_exception(c, s, a, b):
    x = c.submit(div, 1, 0)
    result = await x.exception()
    assert isinstance(result, ZeroDivisionError)

    x = c.submit(div, 1, 1)
    result = await x.exception()
    assert result is None


def test_Future_exception_sync(c):
    x = c.submit(div, 1, 0)
    assert isinstance(x.exception(), ZeroDivisionError)

    x = c.submit(div, 1, 1)
    assert x.exception() is None


@gen_cluster(client=True)
async def test_Future_release(c, s, a, b):
    # Released Futures should be removed timely from the Client
    x = c.submit(div, 1, 1)
    await x
    x.release()
    await asyncio.sleep(0)
    assert not c.futures

    x = c.submit(slowinc, 1, delay=0.5)
    x.release()
    await asyncio.sleep(0)
    assert not c.futures

    x = c.submit(div, 1, 0)
    await x.exception()
    x.release()
    await asyncio.sleep(0)
    assert not c.futures


def test_Future_release_sync(c):
    # Released Futures should be removed timely from the Client
    x = c.submit(div, 1, 1)
    x.result()
    x.release()
    wait_for(lambda: not c.futures, timeout=0.3)

    x = c.submit(slowinc, 1, delay=0.8)
    x.release()
    wait_for(lambda: not c.futures, timeout=0.3)

    x = c.submit(div, 1, 0)
    x.exception()
    x.release()
    wait_for(lambda: not c.futures, timeout=0.3)


def test_short_tracebacks(loop, c):
    tblib = pytest.importorskip("tblib")
    future = c.submit(div, 1, 0)
    try:
        future.result()
    except Exception:
        _, _, tb = sys.exc_info()
    tb = tblib.Traceback(tb).to_dict()
    n = 0

    while tb is not None:
        n += 1
        tb = tb["tb_next"]

    assert n < 5


@gen_cluster(client=True)
async def test_map_naming(c, s, a, b):
    L1 = c.map(inc, range(5))
    L2 = c.map(inc, range(5))
    assert [x.key for x in L1] == [x.key for x in L2]

    L3 = c.map(inc, [1, 1, 1, 1])
    assert len({x._state for x in L3}) == 1

    L4 = c.map(inc, [1, 1, 1, 1], pure=False)
    assert len({x._state for x in L4}) == 4


@gen_cluster(client=True)
async def test_submit_naming(c, s, a, b):
    a = c.submit(inc, 1)
    b = c.submit(inc, 1)
    assert a._state is b._state

    c = c.submit(inc, 1, pure=False)
    assert c.key != a.key
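# --- Illustrative sketch (editor's addition) ---
# `retries` accepts either a single int for every future or a dict keyed by
# futures/collections, as the tests above exercise. `varying` (from
# utils_test) yields its arguments one call at a time, so a single retry is
# enough for the flaky task here.
def example_retries(client):
    flaky = delayed(varying([ZeroDivisionError("boom"), 42]))()
    ok = delayed(inc)(1)
    f1, f2 = client.compute([flaky, ok], retries={flaky: 1})
    assert f1.result() == 42
    assert f2.result() == 2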
@gen_cluster(client=True)
async def test_exceptions(c, s, a, b):
    x = c.submit(div, 1, 2)
    result = await x
    assert result == 1 / 2

    x = c.submit(div, 1, 0)
    with pytest.raises(ZeroDivisionError):
        await x

    x = c.submit(div, 10, 2)  # continues to operate
    result = await x
    assert result == 10 / 2


@gen_cluster()
async def test_gc(s, a, b):
    c = await Client(s.address, asynchronous=True)

    x = c.submit(inc, 10)
    await x
    assert s.tasks[x.key].who_has
    x.__del__()
    await async_wait_for(
        lambda: x.key not in s.tasks or not s.tasks[x.key].who_has, timeout=0.3
    )

    await c.close()


def test_thread(c):
    x = c.submit(inc, 1)
    assert x.result() == 2

    x = c.submit(slowinc, 1, delay=0.3)
    with pytest.raises(TimeoutError):
        x.result(timeout="10 ms")
    assert x.result() == 2


def test_sync_exceptions(c):
    x = c.submit(div, 10, 2)
    assert x.result() == 5

    y = c.submit(div, 10, 0)
    try:
        y.result()
        assert False
    except ZeroDivisionError:
        pass

    z = c.submit(div, 10, 5)
    assert z.result() == 2


@gen_cluster(client=True)
async def test_gather(c, s, a, b):
    x = c.submit(inc, 10)
    y = c.submit(inc, x)

    result = await c.gather(x)
    assert result == 11
    result = await c.gather([x])
    assert result == [11]
    result = await c.gather({"x": x, "y": [y]})
    assert result == {"x": 11, "y": [12]}


@gen_cluster(client=True)
async def test_gather_lost(c, s, a, b):
    [x] = await c.scatter([1], workers=a.address)
    y = c.submit(inc, 1, workers=b.address)

    await a.close()

    with pytest.raises(Exception):
        await c.gather([x, y])


def test_gather_sync(c):
    x = c.submit(inc, 1)
    assert c.gather(x) == 2

    y = c.submit(div, 1, 0)

    with pytest.raises(ZeroDivisionError):
        c.gather([x, y])

    [xx] = c.gather([x, y], errors="skip")
    assert xx == 2


@gen_cluster(client=True)
async def test_gather_strict(c, s, a, b):
    x = c.submit(div, 2, 1)
    y = c.submit(div, 1, 0)

    with pytest.raises(ZeroDivisionError):
        await c.gather([x, y])

    [xx] = await c.gather([x, y], errors="skip")
    assert xx == 2


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)])
async def test_gather_skip(c, s, a):
    x = c.submit(div, 1, 0, priority=10)
    y = c.submit(slowinc, 1, delay=0.5)

    with captured_logger(logging.getLogger("distributed.scheduler")) as sched:
        with captured_logger(logging.getLogger("distributed.client")) as client:
            L = await c.gather([x, y], errors="skip")
            assert L == [2]

    assert not client.getvalue()
    assert not sched.getvalue()


@gen_cluster(client=True)
async def test_limit_concurrent_gathering(c, s, a, b):
    futures = c.map(inc, range(100))
    await c.gather(futures)
    assert len(a.outgoing_transfer_log) + len(b.outgoing_transfer_log) < 100


@gen_cluster(client=True, timeout=None)
async def test_get(c, s, a, b):
    future = c.get({"x": (inc, 1)}, "x", sync=False)
    assert isinstance(future, Future)
    result = await future
    assert result == 2

    futures = c.get({"x": (inc, 1)}, ["x"], sync=False)
    assert isinstance(futures[0], Future)
    result = await c.gather(futures)
    assert result == [2]

    futures = c.get({}, [], sync=False)
    result = await c.gather(futures)
    assert result == []

    result = await c.get(
        {("x", 1): (inc, 1), ("x", 2): (inc, ("x", 1))}, ("x", 2), sync=False
    )
    assert result == 3


def test_get_sync(c):
    assert c.get({"x": (inc, 1)}, "x") == 2


def test_no_future_references(c):
    from weakref import WeakSet

    ws = WeakSet()
    futures = c.map(inc, range(10))
    ws.update(futures)
    del futures
    import gc

    gc.collect()
    start = time()
    while list(ws):
        sleep(0.01)
        assert time() < start + 2


def test_get_sync_optimize_graph_passes_through(c):
    bag = db.range(10, npartitions=3).map(inc)
    dask.compute(bag.sum(), optimize_graph=False)
@gen_cluster(client=True)
async def test_gather_errors(c, s, a, b):
    def f(a, b):
        raise TypeError

    def g(a, b):
        raise AttributeError

    future_f = c.submit(f, 1, 2)
    future_g = c.submit(g, 1, 2)
    with pytest.raises(TypeError):
        await c.gather(future_f)
    with pytest.raises(AttributeError):
        await c.gather(future_g)

    await a.close()


@gen_cluster(client=True)
async def test_wait(c, s, a, b):
    x = c.submit(inc, 1)
    y = c.submit(inc, 1)
    z = c.submit(inc, 2)

    done, not_done = await wait([x, y, z])

    assert done == {x, y, z}
    assert not_done == set()
    assert x.status == y.status == "finished"


@gen_cluster(client=True)
async def test_wait_first_completed(c, s, a, b):
    x = c.submit(slowinc, 1)
    y = c.submit(slowinc, 1)
    z = c.submit(inc, 2)

    done, not_done = await wait([x, y, z], return_when="FIRST_COMPLETED")

    assert done == {z}
    assert not_done == {x, y}
    assert z.status == "finished"
    assert x.status == "pending"
    assert y.status == "pending"


@gen_cluster(client=True, timeout=2)
async def test_wait_timeout(c, s, a, b):
    future = c.submit(sleep, 0.3)
    with pytest.raises(TimeoutError):
        await wait(future, timeout=0.01)


def test_wait_sync(c):
    x = c.submit(inc, 1)
    y = c.submit(inc, 2)

    done, not_done = wait([x, y])
    assert done == {x, y}
    assert not_done == set()
    assert x.status == y.status == "finished"

    future = c.submit(sleep, 0.3)
    with pytest.raises(TimeoutError):
        wait(future, timeout=0.01)


def test_wait_informative_error_for_timeouts(c):
    x = c.submit(inc, 1)
    y = c.submit(inc, 2)

    try:
        wait(x, y)
    except Exception as e:
        assert "timeout" in str(e)
        assert "list" in str(e)


@gen_cluster(client=True)
async def test_garbage_collection(c, s, a, b):
    x = c.submit(inc, 1)
    y = c.submit(inc, 1)

    assert c.refcount[x.key] == 2
    x.__del__()
    await asyncio.sleep(0)
    assert c.refcount[x.key] == 1

    z = c.submit(inc, y)
    y.__del__()
    await asyncio.sleep(0)

    result = await z
    assert result == 3

    ykey = y.key
    y.__del__()
    await asyncio.sleep(0)
    assert ykey not in c.futures


@gen_cluster(client=True)
async def test_garbage_collection_with_scatter(c, s, a, b):
    [future] = await c.scatter([1])
    assert future.key in c.futures
    assert future.status == "finished"
    assert s.who_wants[future.key] == {c.id}

    key = future.key
    assert c.refcount[key] == 1
    future.__del__()
    await asyncio.sleep(0)
    assert c.refcount[key] == 0

    start = time()
    while True:
        if key not in s.tasks or not s.tasks[key].who_has:
            break
        else:
            assert time() < start + 3
            await asyncio.sleep(0.1)


@gen_cluster(timeout=1000, client=True)
async def test_recompute_released_key(c, s, a, b):
    x = c.submit(inc, 100)
    result1 = await x
    xkey = x.key
    del x
    import gc

    gc.collect()
    await asyncio.sleep(0)
    assert c.refcount[xkey] == 0

    # 1 second batching needs a second action to trigger
    while xkey in s.tasks and s.tasks[xkey].who_has or xkey in a.data or xkey in b.data:
        await asyncio.sleep(0.1)

    x = c.submit(inc, 100)
    assert x.key in c.futures
    result2 = await x
    assert result1 == result2


@pytest.mark.slow
@gen_cluster(client=True)
async def test_long_tasks_dont_trigger_timeout(c, s, a, b):
    from time import sleep

    x = c.submit(sleep, 3)
    await x


@pytest.mark.skip
@gen_cluster(client=True)
async def test_missing_data_heals(c, s, a, b):
    a.validate = False
    b.validate = False
    x = c.submit(inc, 1)
    y = c.submit(inc, x)
    z = c.submit(inc, y)

    await wait([x, y, z])

    # Secretly delete y's key
    if y.key in a.data:
        del a.data[y.key]
        a.release_key(y.key)
    if y.key in b.data:
        del b.data[y.key]
        b.release_key(y.key)
    await asyncio.sleep(0)

    w = c.submit(add, y, z)

    result = await w
    assert result == 3 + 4
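# --- Illustrative sketch (editor's addition) ---
# Futures are reference-counted on the client; dropping the last reference
# (or calling release()) lets the scheduler eventually forget the key, which
# the garbage-collection tests above verify via __del__ and refcount.
def example_release(client):
    fut = client.submit(inc, 1)
    assert fut.result() == 2
    fut.release()  # drop this client's claim on the key explicitly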
@pytest.mark.skip
@gen_cluster(client=True)
async def test_gather_robust_to_missing_data(c, s, a, b):
    a.validate = False
    b.validate = False
    x, y, z = c.map(inc, range(3))
    await wait([x, y, z])  # everything computed

    for f in [x, y]:
        for w in [a, b]:
            if f.key in w.data:
                del w.data[f.key]
                await asyncio.sleep(0)
                w.release_key(f.key)

    xx, yy, zz = await c.gather([x, y, z])
    assert (xx, yy, zz) == (1, 2, 3)


@pytest.mark.skip
@gen_cluster(client=True)
async def test_gather_robust_to_nested_missing_data(c, s, a, b):
    a.validate = False
    b.validate = False
    w = c.submit(inc, 1)
    x = c.submit(inc, w)
    y = c.submit(inc, x)
    z = c.submit(inc, y)

    await wait([z])

    for worker in [a, b]:
        for datum in [y, z]:
            if datum.key in worker.data:
                del worker.data[datum.key]
                await asyncio.sleep(0)
                worker.release_key(datum.key)

    result = await c.gather([z])
    assert result == [inc(inc(inc(inc(1))))]


@gen_cluster(client=True)
async def test_tokenize_on_futures(c, s, a, b):
    x = c.submit(inc, 1)
    y = c.submit(inc, 1)
    tok = tokenize(x)
    assert tokenize(x) == tokenize(x)
    assert tokenize(x) == tokenize(y)

    c.futures[x.key].finish()

    assert tok == tokenize(y)


@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster([("127.0.0.1", 1), ("127.0.0.2", 2)], client=True)
async def test_restrictions_submit(c, s, a, b):
    x = c.submit(inc, 1, workers={a.ip})
    y = c.submit(inc, x, workers={b.ip})
    await wait([x, y])

    assert s.host_restrictions[x.key] == {a.ip}
    assert x.key in a.data

    assert s.host_restrictions[y.key] == {b.ip}
    assert y.key in b.data


@gen_cluster(client=True)
async def test_restrictions_ip_port(c, s, a, b):
    x = c.submit(inc, 1, workers={a.address})
    y = c.submit(inc, x, workers={b.address})
    await wait([x, y])

    assert s.worker_restrictions[x.key] == {a.address}
    assert x.key in a.data

    assert s.worker_restrictions[y.key] == {b.address}
    assert y.key in b.data


@gen_cluster(client=True)
async def test_restrictions_ip_port_task_key(c, s, a, b):
    # Create a long dependency list
    tasks = [delayed(inc)(1)]
    for _ in range(100):
        tasks.append(delayed(add)(tasks[-1], random.choice(tasks)))
    last_task = tasks[-1]

    # calculate all dependency keys
    all_tasks = list(last_task.__dask_graph__())

    # only restrict to a single worker
    workers = {d: a.address for d in all_tasks}
    result = c.compute(last_task, workers=workers)
    await result

    # all tasks should have been calculated by the first worker
    for task in tasks:
        assert s.worker_restrictions[task.key] == {a.address}

    # and the data should also be there
    assert last_task.key in a.data
    assert last_task.key not in b.data


@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster([("127.0.0.1", 1), ("127.0.0.2", 2)], client=True)
async def test_restrictions_map(c, s, a, b):
    L = c.map(inc, range(5), workers={a.ip})
    await wait(L)

    assert set(a.data) == {x.key for x in L}
    assert not b.data
    for x in L:
        assert s.host_restrictions[x.key] == {a.ip}

    L = c.map(inc, [10, 11, 12], workers=[{a.ip}, {a.ip, b.ip}, {b.ip}])
    await wait(L)

    assert s.host_restrictions[L[0].key] == {a.ip}
    assert s.host_restrictions[L[1].key] == {a.ip, b.ip}
    assert s.host_restrictions[L[2].key] == {b.ip}

    with pytest.raises(ValueError):
        c.map(inc, [10, 11, 12], workers=[{a.ip}])


@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster([("127.0.0.1", 1), ("127.0.0.2", 2)], client=True)
async def test_restrictions_get(c, s, a, b):
    dsk = {"x": 1, "y": (inc, "x"), "z": (inc, "y")}

    restrictions = {"y": {a.ip}, "z": {b.ip}}
    futures = c.get(dsk, ["y", "z"], restrictions, sync=False)
    result = await c.gather(futures)
    assert result == [2, 3]
    assert "y" in a.data
    assert "z" in b.data
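# --- Illustrative sketch (editor's addition) ---
# `workers=` restricts where a task may run, by hostname or full address, as
# the restriction tests above check against s.host_restrictions and
# s.worker_restrictions; `addr` here is assumed to be a live worker address.
def example_restrictions(client, addr):
    fut = client.submit(inc, 1, workers=[addr])
    assert fut.result() == 2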
@gen_cluster(client=True)
async def dont_test_bad_restrictions_raise_exception(c, s, a, b):
    z = c.submit(inc, 2, workers={"bad-address"})
    try:
        await z
        assert False
    except ValueError as e:
        assert "bad-address" in str(e)
        assert z.key in str(e)


@gen_cluster(client=True, timeout=None)
async def test_remove_worker(c, s, a, b):
    L = c.map(inc, range(20))
    await wait(L)

    await b.close()

    assert b.address not in s.workers

    result = await c.gather(L)
    assert result == list(map(inc, range(20)))


@gen_cluster(nthreads=[("127.0.0.1", 1)], client=True)
async def test_errors_dont_block(c, s, w):
    L = [c.submit(inc, 1), c.submit(throws, 1), c.submit(inc, 2), c.submit(throws, 2)]

    start = time()
    while not (L[0].status == L[2].status == "finished"):
        assert time() < start + 5
        await asyncio.sleep(0.01)

    result = await c.gather([L[0], L[2]])
    assert result == [2, 3]


@gen_cluster(client=True)
async def test_submit_quotes(c, s, a, b):
    def assert_list(x, z=[]):
        return isinstance(x, list) and isinstance(z, list)

    x = c.submit(assert_list, [1, 2, 3])
    result = await x
    assert result

    x = c.submit(assert_list, [1, 2, 3], z=[4, 5, 6])
    result = await x
    assert result

    x = c.submit(inc, 1)
    y = c.submit(inc, 2)
    z = c.submit(assert_list, [x, y])
    result = await z
    assert result


@gen_cluster(client=True)
async def test_map_quotes(c, s, a, b):
    def assert_list(x, z=[]):
        return isinstance(x, list) and isinstance(z, list)

    L = c.map(assert_list, [[1, 2, 3], [4]])
    result = await c.gather(L)
    assert all(result)

    L = c.map(assert_list, [[1, 2, 3], [4]], z=[10])
    result = await c.gather(L)
    assert all(result)

    L = c.map(assert_list, [[1, 2, 3], [4]], [[]] * 3)
    result = await c.gather(L)
    assert all(result)


@gen_cluster()
async def test_two_consecutive_clients_share_results(s, a, b):
    c = await Client(s.address, asynchronous=True)

    x = c.submit(random.randint, 0, 1000, pure=True)
    xx = await x

    f = await Client(s.address, asynchronous=True)

    y = f.submit(random.randint, 0, 1000, pure=True)
    yy = await y

    assert xx == yy

    await c.close()
    await f.close()


@gen_cluster(client=True)
async def test_submit_then_get_with_Future(c, s, a, b):
    x = c.submit(slowinc, 1)
    dsk = {"y": (inc, x)}

    result = await c.get(dsk, "y", sync=False)
    assert result == 3


@gen_cluster(client=True)
async def test_aliases(c, s, a, b):
    x = c.submit(inc, 1)

    dsk = {"y": x}
    result = await c.get(dsk, "y", sync=False)
    assert result == 2


@gen_cluster(client=True)
async def test_aliases_2(c, s, a, b):
    dsk_keys = [
        ({"x": (inc, 1), "y": "x", "z": "x", "w": (add, "y", "z")}, ["y", "w"]),
        ({"x": "y", "y": 1}, ["x"]),
        ({"x": 1, "y": "x", "z": "y", "w": (inc, "z")}, ["w"]),
    ]
    for dsk, keys in dsk_keys:
        result = await c.gather(c.get(dsk, keys, sync=False))
        assert list(result) == list(dask.get(dsk, keys))
        await asyncio.sleep(0)
@gen_cluster(client=True)
async def test_scatter(c, s, a, b):
    d = await c.scatter({"y": 20})
    assert isinstance(d["y"], Future)
    assert a.data.get("y") == 20 or b.data.get("y") == 20
    y_who_has = s.get_who_has(keys=["y"])["y"]
    assert a.address in y_who_has or b.address in y_who_has
    assert s.get_nbytes(summary=False) == {"y": sizeof(20)}
    yy = await c.gather([d["y"]])
    assert yy == [20]

    [x] = await c.scatter([10])
    assert isinstance(x, Future)
    assert a.data.get(x.key) == 10 or b.data.get(x.key) == 10
    xx = await c.gather([x])
    x_who_has = s.get_who_has(keys=[x.key])[x.key]
    assert s.tasks[x.key].who_has
    assert (
        s.workers[a.address] in s.tasks[x.key].who_has
        or s.workers[b.address] in s.tasks[x.key].who_has
    )
    assert s.get_nbytes(summary=False) == {"y": sizeof(20), x.key: sizeof(10)}
    assert xx == [10]

    z = c.submit(add, x, d["y"])  # submit works on Future
    result = await z
    assert result == 10 + 20
    result = await c.gather([z, x])
    assert result == [30, 10]


@gen_cluster(client=True)
async def test_scatter_types(c, s, a, b):
    d = await c.scatter({"x": 1})
    assert isinstance(d, dict)
    assert list(d) == ["x"]

    for seq in [[1], (1,), {1}, frozenset([1])]:
        L = await c.scatter(seq)
        assert isinstance(L, type(seq))
        assert len(L) == 1
        s.validate_state()

    seq = await c.scatter(range(5))
    assert isinstance(seq, list)
    assert len(seq) == 5
    s.validate_state()


@gen_cluster(client=True)
async def test_scatter_non_list(c, s, a, b):
    x = await c.scatter(1)
    assert isinstance(x, Future)
    result = await x
    assert result == 1


@gen_cluster(client=True)
async def test_scatter_hash(c, s, a, b):
    [a] = await c.scatter([1])
    [b] = await c.scatter([1])

    assert a.key == b.key
    s.validate_state()


@gen_cluster(client=True)
async def test_scatter_tokenize_local(c, s, a, b):
    from dask.base import normalize_token

    class MyObj:
        pass

    L = []

    @normalize_token.register(MyObj)
    def f(x):
        L.append(x)
        return "x"

    obj = MyObj()

    future = await c.scatter(obj)
    assert L and L[0] is obj


@gen_cluster(client=True)
async def test_scatter_singletons(c, s, a, b):
    np = pytest.importorskip("numpy")
    pd = pytest.importorskip("pandas")
    for x in [1, np.ones(5), pd.DataFrame({"x": [1, 2, 3]})]:
        future = await c.scatter(x)
        result = await future
        assert str(result) == str(x)


@gen_cluster(client=True)
async def test_scatter_typename(c, s, a, b):
    future = await c.scatter(123)
    assert future.key.startswith("int")


@gen_cluster(client=True)
async def test_scatter_hash_2(c, s, a, b):
    # renamed from a second `test_scatter_hash`, which shadowed the test above
    x = await c.scatter(123)
    y = await c.scatter(123)
    assert x.key == y.key

    z = await c.scatter(123, hash=False)
    assert z.key != y.key


@gen_cluster(client=True)
async def test_get_releases_data(c, s, a, b):
    await c.gather(c.get({"x": (inc, 1)}, ["x"], sync=False))
    import gc

    gc.collect()

    start = time()
    while c.refcount["x"]:
        await asyncio.sleep(0.01)
        assert time() < start + 2


def test_current(s, a, b):
    with Client(s["address"]) as c:
        assert Client.current() is c
    with pytest.raises(ValueError):
        Client.current()
    with Client(s["address"]) as c:
        assert Client.current() is c


def test_global_clients(loop):
    assert _get_global_client() is None
    with pytest.raises(ValueError):
        default_client()
    with cluster() as (s, [a, b]):
        with Client(s["address"], loop=loop) as c:
            assert _get_global_client() is c
            assert default_client() is c
            with Client(s["address"], loop=loop) as f:
                assert _get_global_client() is f
                assert default_client() is f
                assert default_client(c) is c
                assert default_client(f) is f

    assert _get_global_client() is None


@gen_cluster(client=True)
async def test_exception_on_exception(c, s, a, b):
    x = c.submit(lambda: 1 / 0)
    y = c.submit(inc, x)

    with pytest.raises(ZeroDivisionError):
        await y

    z = c.submit(inc, y)

    with pytest.raises(ZeroDivisionError):
        await z


@gen_cluster(client=True)
async def test_get_nbytes(c, s, a, b):
    [x] = await c.scatter([1])
    assert s.get_nbytes(summary=False) == {x.key: sizeof(1)}

    y = c.submit(inc, x)
    await y

    assert s.get_nbytes(summary=False) == {x.key: sizeof(1), y.key: sizeof(2)}


@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster([("127.0.0.1", 1), ("127.0.0.2", 2)], client=True)
async def test_nbytes_determines_worker(c, s, a, b):
    x = c.submit(identity, 1, workers=[a.ip])
    y = c.submit(identity, tuple(range(100)), workers=[b.ip])
    await c.gather([x, y])

    z = c.submit(lambda x, y: None, x, y)
    await z
    assert s.tasks[z.key].who_has == {s.workers[b.address]}
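# --- Illustrative sketch (editor's addition) ---
# scatter() ships local data to workers and hands back Futures; the container
# type is preserved (list -> list of Futures, dict -> dict of Futures), as the
# scatter tests above assert.
def example_scatter(client):
    [x] = client.scatter([10])
    d = client.scatter({"y": 20})
    total = client.submit(add, x, d["y"])
    assert total.result() == 30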
@gen_cluster(client=True)
async def test_if_intermediates_clear_on_error(c, s, a, b):
    x = delayed(div, pure=True)(1, 0)
    y = delayed(div, pure=True)(1, 2)
    z = delayed(add, pure=True)(x, y)
    f = c.compute(z)
    with pytest.raises(ZeroDivisionError):
        await f
    s.validate_state()
    assert not any(ts.who_has for ts in s.tasks.values())


@gen_cluster(
    client=True, config={"distributed.scheduler.default-task-durations": {"f": "1ms"}}
)
async def test_pragmatic_move_small_data_to_large_data(c, s, a, b):
    np = pytest.importorskip("numpy")
    lists = c.map(np.ones, [10000] * 10, pure=False)
    sums = c.map(np.sum, lists)
    total = c.submit(sum, sums)

    def f(x, y):
        return None

    results = c.map(f, lists, [total] * 10)

    await wait([total])
    await wait(results)

    assert (
        sum(
            s.tasks[r.key].who_has.issubset(s.tasks[l.key].who_has)
            for l, r in zip(lists, results)
        )
        >= 9
    )


@gen_cluster(client=True)
async def test_get_with_non_list_key(c, s, a, b):
    dsk = {("x", 0): (inc, 1), 5: (inc, 2)}

    x = await c.get(dsk, ("x", 0), sync=False)
    y = await c.get(dsk, 5, sync=False)
    assert x == 2
    assert y == 3


@gen_cluster(client=True)
async def test_get_with_error(c, s, a, b):
    dsk = {"x": (div, 1, 0), "y": (inc, "x")}
    with pytest.raises(ZeroDivisionError):
        await c.get(dsk, "y", sync=False)


def test_get_with_error_sync(c):
    dsk = {"x": (div, 1, 0), "y": (inc, "x")}
    with pytest.raises(ZeroDivisionError):
        c.get(dsk, "y")


@gen_cluster(client=True)
async def test_directed_scatter(c, s, a, b):
    await c.scatter([1, 2, 3], workers=[a.address])
    assert len(a.data) == 3
    assert not b.data

    await c.scatter([4, 5], workers=[b.name])
    assert len(b.data) == 2


def test_directed_scatter_sync(c, s, a, b, loop):
    futures = c.scatter([1, 2, 3], workers=[b["address"]])
    has_what = sync(loop, c.scheduler.has_what)
    assert len(has_what[b["address"]]) == len(futures)
    assert len(has_what[a["address"]]) == 0


@gen_cluster(client=True)
async def test_scatter_direct(c, s, a, b):
    future = await c.scatter(123, direct=True)
    assert future.key in a.data or future.key in b.data
    assert s.tasks[future.key].who_has
    assert future.status == "finished"
    result = await future
    assert result == 123
    assert not s.counters["op"].components[0]["scatter"]

    result = await future
    assert not s.counters["op"].components[0]["gather"]

    result = await c.gather(future)
    assert not s.counters["op"].components[0]["gather"]


@gen_cluster(client=True)
async def test_scatter_direct_numpy(c, s, a, b):
    np = pytest.importorskip("numpy")
    x = np.ones(5)
    future = await c.scatter(x, direct=True)
    result = await future
    assert np.allclose(x, result)
    assert not s.counters["op"].components[0]["scatter"]


@gen_cluster(client=True)
async def test_scatter_direct_broadcast(c, s, a, b):
    future2 = await c.scatter(456, direct=True, broadcast=True)
    assert future2.key in a.data
    assert future2.key in b.data
    assert s.tasks[future2.key].who_has == {s.workers[a.address], s.workers[b.address]}
    result = await future2
    assert result == 456
    assert not s.counters["op"].components[0]["scatter"]


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 4)
async def test_scatter_direct_balanced(c, s, *workers):
    futures = await c.scatter([1, 2, 3], direct=True)
    assert sorted([len(w.data) for w in workers]) == [0, 1, 1, 1]


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 4)
async def test_scatter_direct_broadcast_target(c, s, *workers):
    futures = await c.scatter([123, 456], direct=True, workers=workers[0].address)
    assert futures[0].key in workers[0].data
    assert futures[1].key in workers[0].data

    futures = await c.scatter(
        [123, 456],
        direct=True,
        broadcast=True,
        workers=[w.address for w in workers[:3]],
    )
    assert (
        f.key in w.data and w.address in s.tasks[f.key].who_has
        for f in futures
        for w in workers[:3]
    )


@gen_cluster(client=True, nthreads=[])
async def test_scatter_direct_empty(c, s):
    with pytest.raises((ValueError, TimeoutError)):
        await c.scatter(123, direct=True, timeout=0.1)


@gen_cluster(client=True, timeout=None, nthreads=[("127.0.0.1", 1)] * 5)
async def test_scatter_direct_spread_evenly(c, s, *workers):
    futures = []
    for i in range(10):
        future = await c.scatter(i, direct=True)
        futures.append(future)

    assert all(w.data for w in workers)


@pytest.mark.parametrize("direct", [True, False])
@pytest.mark.parametrize("broadcast", [True, False])
def test_scatter_gather_sync(c, direct, broadcast):
    futures = c.scatter([1, 2, 3], direct=direct, broadcast=broadcast)
    results = c.gather(futures, direct=direct)
    assert results == [1, 2, 3]

    delayed(inc)(1).compute(direct=direct)


@gen_cluster(client=True)
async def test_gather_direct(c, s, a, b):
    futures = await c.scatter([1, 2, 3])

    data = await c.gather(futures, direct=True)
    assert data == [1, 2, 3]


@gen_cluster(client=True)
async def test_many_submits_spread_evenly(c, s, a, b):
    L = [c.submit(inc, i) for i in range(10)]
    await wait(L)

    assert a.data and b.data


@gen_cluster(client=True)
async def test_traceback(c, s, a, b):
    x = c.submit(div, 1, 0)
    tb = await x.traceback()
    assert any("x / y" in line for line in pluck(3, traceback.extract_tb(tb)))


@gen_cluster(client=True)
async def test_get_traceback(c, s, a, b):
    try:
        await c.get({"x": (div, 1, 0)}, "x", sync=False)
    except ZeroDivisionError:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        L = traceback.format_tb(exc_traceback)
        assert any("x / y" in line for line in L)


@gen_cluster(client=True)
async def test_gather_traceback(c, s, a, b):
    x = c.submit(div, 1, 0)
    try:
        await c.gather(x)
    except ZeroDivisionError:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        L = traceback.format_tb(exc_traceback)
        assert any("x / y" in line for line in L)


def test_traceback_sync(c):
    x = c.submit(div, 1, 0)
    tb = x.traceback()
    assert any(
        "x / y" in line
        for line in concat(traceback.extract_tb(tb))
        if isinstance(line, str)
    )

    y = c.submit(inc, x)
    tb2 = y.traceback()

    assert set(pluck(3, traceback.extract_tb(tb2))).issuperset(
        set(pluck(3, traceback.extract_tb(tb)))
    )

    z = c.submit(div, 1, 2)
    tb = z.traceback()
    assert tb is None


@gen_cluster(client=True)
async def test_upload_file(c, s, a, b):
    def g():
        import myfile

        return myfile.f()

    with save_sys_modules():
        for value in [123, 456]:
            with tmp_text("myfile.py", "def f():\n    return {}".format(value)) as fn:
                await c.upload_file(fn)
                x = c.submit(g, pure=False)
                result = await x
                assert result == value


@gen_cluster(client=True)
async def test_upload_file_refresh_delayed(c, s, a, b):
    with save_sys_modules():
        for value in [123, 456]:
            with tmp_text("myfile.py", "def f():\n    return {}".format(value)) as fn:
                await c.upload_file(fn)
                sys.path.append(os.path.dirname(fn))
                from myfile import f

                b = delayed(f)()
                bb = c.compute(b, sync=False)
                result = await c.gather(bb)
                assert result == value


@gen_cluster(client=True)
async def test_upload_file_no_extension(c, s, a, b):
    with tmp_text("myfile", "") as fn:
        await c.upload_file(fn)
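# --- Illustrative sketch (editor's addition) ---
# upload_file ships a .py/.zip/.egg file to every worker and puts it on their
# sys.path, which is what the upload tests in this section rely on; tmp_text
# is the utils_test helper already imported in this module.
def example_upload(client):
    with tmp_text("mymod.py", "x = 123") as fn:
        client.upload_file(fn)
        assert client.submit(lambda: __import__("mymod").x).result() == 123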
@gen_cluster(client=True)
async def test_upload_file_zip(c, s, a, b):
    def g():
        import myfile

        return myfile.f()

    with save_sys_modules():
        try:
            for value in [123, 456]:
                with tmp_text(
                    "myfile.py", "def f():\n    return {}".format(value)
                ) as fn_my_file:
                    with zipfile.ZipFile("myfile.zip", "w") as z:
                        z.write(fn_my_file, arcname=os.path.basename(fn_my_file))
                    await c.upload_file("myfile.zip")

                    x = c.submit(g, pure=False)
                    result = await x
                    assert result == value
        finally:
            if os.path.exists("myfile.zip"):
                os.remove("myfile.zip")


@gen_cluster(client=True)
async def test_upload_file_egg(c, s, a, b):
    def g():
        import package_1, package_2

        return package_1.a, package_2.b

    # c.upload_file tells each worker to
    # - put this file in their local_directory
    # - modify their sys.path to include it
    # we don't care about the local_directory
    # but we do care about restoring the path

    with save_sys_modules():
        for value in [123, 456]:
            with tmpfile() as dirname:
                os.mkdir(dirname)

                with open(os.path.join(dirname, "setup.py"), "w") as f:
                    f.write("from setuptools import setup, find_packages\n")
                    f.write(
                        'setup(name="my_package", packages=find_packages(), version="{}")\n'.format(
                            value
                        )
                    )

                # test a package with an underscore in the name
                package_1 = os.path.join(dirname, "package_1")
                os.mkdir(package_1)
                with open(os.path.join(package_1, "__init__.py"), "w") as f:
                    f.write("a = {}\n".format(value))

                # test multiple top-level packages
                package_2 = os.path.join(dirname, "package_2")
                os.mkdir(package_2)
                with open(os.path.join(package_2, "__init__.py"), "w") as f:
                    f.write("b = {}\n".format(value))

                # compile these into an egg
                subprocess.check_call(
                    [sys.executable, "setup.py", "bdist_egg"], cwd=dirname
                )

                egg_root = os.path.join(dirname, "dist")
                # first file ending with '.egg'
                egg_name = [
                    fname for fname in os.listdir(egg_root) if fname.endswith(".egg")
                ][0]
                egg_path = os.path.join(egg_root, egg_name)

                await c.upload_file(egg_path)
                os.remove(egg_path)

                x = c.submit(g, pure=False)
                result = await x
                assert result == (value, value)


@gen_cluster(client=True)
async def test_upload_large_file(c, s, a, b):
    assert a.local_directory
    assert b.local_directory
    with tmp_text("myfile", "abc") as fn:
        with tmp_text("myfile2", "def") as fn2:
            await c._upload_large_file(fn, remote_filename="x")
            await c._upload_large_file(fn2)

            for w in [a, b]:
                assert os.path.exists(os.path.join(w.local_directory, "x"))
                assert os.path.exists(os.path.join(w.local_directory, "myfile2"))
                with open(os.path.join(w.local_directory, "x")) as f:
                    assert f.read() == "abc"
                with open(os.path.join(w.local_directory, "myfile2")) as f:
                    assert f.read() == "def"


def test_upload_file_sync(c):
    def g():
        import myfile

        return myfile.x

    with tmp_text("myfile.py", "x = 123") as fn:
        c.upload_file(fn)
        x = c.submit(g)
        assert x.result() == 123


@gen_cluster(client=True)
async def test_upload_file_exception(c, s, a, b):
    with tmp_text("myfile.py", "syntax-error!") as fn:
        with pytest.raises(SyntaxError):
            await c.upload_file(fn)


def test_upload_file_exception_sync(c):
    with tmp_text("myfile.py", "syntax-error!") as fn:
        with pytest.raises(SyntaxError):
            c.upload_file(fn)


@pytest.mark.skip
@gen_cluster()
async def test_multiple_clients(s, a, b):
    a = await Client(s.address, asynchronous=True)
    b = await Client(s.address, asynchronous=True)

    x = a.submit(inc, 1)
    y = b.submit(inc, 2)
    assert x.client is a
    assert y.client is b
    xx = await x
    yy = await y
    assert xx == 2
    assert yy == 3
    z = a.submit(add, x, y)
    assert z.client is a
    zz = await z
    assert zz == 5

    await a.close()
    await b.close()


@gen_cluster(client=True)
async def test_async_compute(c, s, a, b):
    from dask.delayed import delayed

    x = delayed(1)
    y = delayed(inc)(x)
    z = delayed(dec)(x)

    [yy, zz, aa] = c.compute([y, z, 3], sync=False)
    assert isinstance(yy, Future)
    assert isinstance(zz, Future)
    assert aa == 3

    result = await c.gather([yy, zz])
    assert result == [2, 0]

    assert isinstance(c.compute(y), Future)
    assert isinstance(c.compute([y]), (tuple, list))
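# --- Illustrative sketch (editor's addition) ---
# compute() returns a Future for a concrete result, while persist() returns an
# equivalent collection whose graph has been replaced by running Futures, as
# the compute/persist tests around here demonstrate.
def example_persist_compute(client):
    y = delayed(inc)(1)
    yy = client.persist(y)  # same type as y, result kept on the cluster
    assert client.compute(yy).result() == 2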
@gen_cluster(client=True)
async def test_async_compute_with_scatter(c, s, a, b):
    d = await c.scatter({("x", 1): 1, ("y", 1): 2})
    x, y = d[("x", 1)], d[("y", 1)]

    from dask.delayed import delayed

    z = delayed(add)(delayed(inc)(x), delayed(inc)(y))
    zz = c.compute(z)

    [result] = await c.gather([zz])
    assert result == 2 + 3


def test_sync_compute(c):
    x = delayed(1)
    y = delayed(inc)(x)
    z = delayed(dec)(x)

    yy, zz = c.compute([y, z], sync=True)
    assert (yy, zz) == (2, 0)


@gen_cluster(client=True)
async def test_remote_scatter_gather(c, s, a, b):
    x, y, z = await c.scatter([1, 2, 3])

    assert x.key in a.data or x.key in b.data
    assert y.key in a.data or y.key in b.data
    assert z.key in a.data or z.key in b.data

    xx, yy, zz = await c.gather([x, y, z])
    assert (xx, yy, zz) == (1, 2, 3)


@gen_cluster(timeout=1000, client=True)
async def test_remote_submit_on_Future(c, s, a, b):
    x = c.submit(lambda x: x + 1, 1)
    y = c.submit(lambda x: x + 1, x)
    result = await y
    assert result == 3


def test_start_is_idempotent(c):
    c.start()
    c.start()
    c.start()

    x = c.submit(inc, 1)
    assert x.result() == 2


@gen_cluster(client=True)
async def test_client_with_scheduler(c, s, a, b):
    assert s.nthreads == {a.address: a.nthreads, b.address: b.nthreads}

    x = c.submit(inc, 1)
    y = c.submit(inc, 2)
    z = c.submit(add, x, y)
    result = await x
    assert result == 1 + 1
    result = await z
    assert result == 1 + 1 + 1 + 2

    A, B, C = await c.scatter([1, 2, 3])
    AA, BB, xx = await c.gather([A, B, x])
    assert (AA, BB, xx) == (1, 2, 2)

    result = await c.get({"x": (inc, 1), "y": (add, "x", 10)}, "y", sync=False)
    assert result == 12


@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster([("127.0.0.1", 1), ("127.0.0.2", 2)], client=True)
async def test_allow_restrictions(c, s, a, b):
    aws = s.workers[a.address]
    bws = s.workers[b.address]

    x = c.submit(inc, 1, workers=a.ip)
    await x
    assert s.tasks[x.key].who_has == {aws}
    assert not s.loose_restrictions

    x = c.submit(inc, 2, workers=a.ip, allow_other_workers=True)
    await x
    assert s.tasks[x.key].who_has == {aws}
    assert x.key in s.loose_restrictions

    L = c.map(inc, range(3, 13), workers=a.ip, allow_other_workers=True)
    await wait(L)
    assert all(s.tasks[f.key].who_has == {aws} for f in L)
    assert {f.key for f in L}.issubset(s.loose_restrictions)

    x = c.submit(inc, 15, workers="127.0.0.3", allow_other_workers=True)
    await x
    assert s.tasks[x.key].who_has
    assert x.key in s.loose_restrictions

    L = c.map(inc, range(15, 25), workers="127.0.0.3", allow_other_workers=True)
    await wait(L)
    assert all(s.tasks[f.key].who_has for f in L)
    assert {f.key for f in L}.issubset(s.loose_restrictions)

    with pytest.raises(ValueError):
        c.submit(inc, 1, allow_other_workers=True)

    with pytest.raises(ValueError):
        c.map(inc, [1], allow_other_workers=True)

    with pytest.raises(TypeError):
        c.submit(inc, 20, workers="127.0.0.1", allow_other_workers="Hello!")

    with pytest.raises(TypeError):
        c.map(inc, [20], workers="127.0.0.1", allow_other_workers="Hello!")


@pytest.mark.skipif("True", reason="because")
def test_bad_address():
    try:
        Client("123.123.123.123:1234", timeout=0.1)
    except (IOError, TimeoutError) as e:
        assert "connect" in str(e).lower()

    try:
        Client("127.0.0.1:1234", timeout=0.1)
    except (IOError, TimeoutError) as e:
        assert "connect" in str(e).lower()


def test_informative_error_on_cluster_type():
    with pytest.raises(TypeError) as exc_info:
        Client(LocalCluster)

    assert "Scheduler address must be a string or a Cluster instance" in str(
        exc_info.value
    )
ValueError("a" * 100000) x = c.submit(bad, 10) try: await x except ValueError as e: assert len(str(e)) < 100000 tb = await x.traceback() assert all( len(line) < 100000 for line in concat(traceback.extract_tb(tb)) if isinstance(line, str) ) @gen_cluster(client=True) async def test_map_on_futures_with_kwargs(c, s, a, b): def f(x, y=10): return x + y futures = c.map(inc, range(10)) futures2 = c.map(f, futures, y=20) results = await c.gather(futures2) assert results == [i + 1 + 20 for i in range(10)] future = c.submit(inc, 100) future2 = c.submit(f, future, y=200) result = await future2 assert result == 100 + 1 + 200 class BadlySerializedObject: def __getstate__(self): return 1 def __setstate__(self, state): raise TypeError("hello!") class FatallySerializedObject: def __getstate__(self): return 1 def __setstate__(self, state): print("This should never have been deserialized, closing") import sys sys.exit(0) @gen_cluster(client=True) async def test_badly_serialized_input(c, s, a, b): o = BadlySerializedObject() future = c.submit(inc, o) futures = c.map(inc, range(10)) L = await c.gather(futures) assert list(L) == list(map(inc, range(10))) assert future.status == "error" with pytest.raises(Exception) as info: await future assert "hello!" in str(info.value) @pytest.mark.skipif("True", reason="") async def test_badly_serialized_input_stderr(capsys, c): o = BadlySerializedObject() future = c.submit(inc, o) start = time() while True: sleep(0.01) out, err = capsys.readouterr() if "hello!" in err: break assert time() - start < 20 assert future.status == "error" def test_repr(loop): funcs = [str, repr, lambda x: x._repr_html_()] with cluster(nworkers=3, worker_kwargs={"memory_limit": "2 GB"}) as (s, [a, b, c]): with Client(s["address"], loop=loop) as c: for func in funcs: text = func(c) assert c.scheduler.address in text assert "3" in text assert "6" in text assert "GB" in text if "<table" not in text: assert len(text) < 80 for func in funcs: text = func(c) assert "not connected" in text @gen_cluster(client=True) async def test_repr_async(c, s, a, b): c._repr_html_() @gen_cluster(client=True, worker_kwargs={"memory_limit": None}) async def test_repr_no_memory_limit(c, s, a, b): c._repr_html_() @gen_test() async def test_repr_localcluster(): cluster = await LocalCluster( processes=False, dashboard_address=None, asynchronous=True ) client = await Client(cluster, asynchronous=True) try: text = client._repr_html_() assert cluster.scheduler.address in text assert is_valid_xml(client._repr_html_()) finally: await client.close() await cluster.close() @gen_cluster(client=True) async def test_forget_simple(c, s, a, b): x = c.submit(inc, 1, retries=2) y = c.submit(inc, 2) z = c.submit(add, x, y, workers=[a.ip], allow_other_workers=True) await wait([x, y, z]) assert not s.waiting_data.get(x.key) assert not s.waiting_data.get(y.key) assert set(s.tasks) == {x.key, y.key, z.key} s.client_releases_keys(keys=[x.key], client=c.id) assert x.key in s.tasks s.client_releases_keys(keys=[z.key], client=c.id) assert x.key not in s.tasks assert z.key not in s.tasks assert not s.tasks[y.key].dependents s.client_releases_keys(keys=[y.key], client=c.id) assert not s.tasks @gen_cluster(client=True) async def test_forget_complex(e, s, A, B): a, b, c, d = await e.scatter(list(range(4))) ab = e.submit(add, a, b) cd = e.submit(add, c, d) ac = e.submit(add, a, c) acab = e.submit(add, ac, ab) await wait([a, b, c, d, ab, ac, cd, acab]) assert set(s.tasks) == {f.key for f in [ab, ac, cd, acab, a, b, c, d]} 
    s.client_releases_keys(keys=[ab.key], client=e.id)
    assert set(s.tasks) == {f.key for f in [ab, ac, cd, acab, a, b, c, d]}

    s.client_releases_keys(keys=[b.key], client=e.id)
    assert set(s.tasks) == {f.key for f in [ac, cd, acab, a, c, d]}

    s.client_releases_keys(keys=[acab.key], client=e.id)
    assert set(s.tasks) == {f.key for f in [ac, cd, a, c, d]}
    assert b.key not in s.tasks

    start = time()
    while b.key in A.data or b.key in B.data:
        await asyncio.sleep(0.01)
        assert time() < start + 10

    s.client_releases_keys(keys=[ac.key], client=e.id)
    assert set(s.tasks) == {f.key for f in [cd, a, c, d]}


@gen_cluster(client=True)
async def test_forget_in_flight(e, s, A, B):
    delayed2 = partial(delayed, pure=True)
    a, b, c, d = [delayed2(slowinc)(i) for i in range(4)]
    ab = delayed2(slowadd)(a, b, dask_key_name="ab")
    cd = delayed2(slowadd)(c, d, dask_key_name="cd")
    ac = delayed2(slowadd)(a, c, dask_key_name="ac")
    acab = delayed2(slowadd)(ac, ab, dask_key_name="acab")

    x, y = e.compute([ac, acab])
    s.validate_state()

    for i in range(5):
        await asyncio.sleep(0.01)
        s.validate_state()

    s.client_releases_keys(keys=[y.key], client=e.id)
    s.validate_state()

    for k in [acab.key, ab.key, b.key]:
        assert k not in s.tasks


@gen_cluster(client=True)
async def test_forget_errors(c, s, a, b):
    x = c.submit(div, 1, 0)
    y = c.submit(inc, x)
    z = c.submit(inc, y)
    await wait([y])

    assert x.key in s.exceptions
    assert x.key in s.exceptions_blame
    assert y.key in s.exceptions_blame
    assert z.key in s.exceptions_blame

    s.client_releases_keys(keys=[z.key], client=c.id)

    assert x.key in s.exceptions
    assert x.key in s.exceptions_blame
    assert y.key in s.exceptions_blame
    assert z.key not in s.exceptions_blame

    s.client_releases_keys(keys=[x.key], client=c.id)

    assert x.key in s.exceptions
    assert x.key in s.exceptions_blame
    assert y.key in s.exceptions_blame
    assert z.key not in s.exceptions_blame

    s.client_releases_keys(keys=[y.key], client=c.id)

    assert x.key not in s.exceptions
    assert x.key not in s.exceptions_blame
    assert y.key not in s.exceptions_blame
    assert z.key not in s.exceptions_blame


def test_repr_sync(c):
    s = str(c)
    r = repr(c)
    assert c.scheduler.address in s
    assert c.scheduler.address in r
    assert str(2) in s  # nworkers
    assert "cores" in s or "threads" in s


@gen_cluster(client=True)
async def test_waiting_data(c, s, a, b):
    x = c.submit(inc, 1)
    y = c.submit(inc, 2)
    z = c.submit(add, x, y, workers=[a.ip], allow_other_workers=True)

    await wait([x, y, z])

    assert not s.waiting_data.get(x.key)
    assert not s.waiting_data.get(y.key)


@gen_cluster()
async def test_multi_client(s, a, b):
    c = await Client(s.address, asynchronous=True)
    f = await Client(s.address, asynchronous=True)

    assert set(s.client_comms) == {c.id, f.id}

    x = c.submit(inc, 1)
    y = f.submit(inc, 2)
    y2 = c.submit(inc, 2)

    assert y.key == y2.key

    await wait([x, y])

    assert s.wants_what == {
        c.id: {x.key, y.key},
        f.id: {y.key},
        "fire-and-forget": set(),
    }
    assert s.who_wants == {x.key: {c.id}, y.key: {c.id, f.id}}

    await c.close()

    start = time()
    while c.id in s.wants_what:
        await asyncio.sleep(0.01)
        assert time() < start + 5

    assert c.id not in s.wants_what
    assert c.id not in s.who_wants[y.key]
    assert x.key not in s.who_wants

    await f.close()

    start = time()
    while s.tasks:
        await asyncio.sleep(0.01)
        assert time() < start + 2, s.tasks


def long_running_client_connection(address):
    with pristine_loop():
        c = Client(address)
        x = c.submit(lambda x: x + 1, 10)
        x.result()
        sleep(100)
@gen_cluster()
async def test_cleanup_after_broken_client_connection(s, a, b):
    proc = mp_context.Process(target=long_running_client_connection, args=(s.address,))
    proc.daemon = True
    proc.start()

    start = time()
    while not s.tasks:
        await asyncio.sleep(0.01)
        assert time() < start + 5

    proc.terminate()

    start = time()
    while s.tasks:
        await asyncio.sleep(0.01)
        assert time() < start + 5


@gen_cluster()
async def test_multi_garbage_collection(s, a, b):
    c = await Client(s.address, asynchronous=True)
    f = await Client(s.address, asynchronous=True)

    x = c.submit(inc, 1)
    y = f.submit(inc, 2)
    y2 = c.submit(inc, 2)

    assert y.key == y2.key

    await wait([x, y])

    x.__del__()
    start = time()
    while x.key in a.data or x.key in b.data:
        await asyncio.sleep(0.01)
        assert time() < start + 5

    assert s.wants_what == {c.id: {y.key}, f.id: {y.key}, "fire-and-forget": set()}
    assert s.who_wants == {y.key: {c.id, f.id}}

    y.__del__()
    start = time()
    while x.key in s.wants_what[f.id]:
        await asyncio.sleep(0.01)
        assert time() < start + 5

    await asyncio.sleep(0.1)
    assert y.key in a.data or y.key in b.data
    assert s.wants_what == {c.id: {y.key}, f.id: set(), "fire-and-forget": set()}
    assert s.who_wants == {y.key: {c.id}}

    y2.__del__()
    start = time()
    while y.key in a.data or y.key in b.data:
        await asyncio.sleep(0.01)
        assert time() < start + 5

    assert not any(v for v in s.wants_what.values())
    assert not s.who_wants

    await c.close()
    await f.close()


@gen_cluster(client=True)
async def test__broadcast(c, s, a, b):
    x, y = await c.scatter([1, 2], broadcast=True)
    assert a.data == b.data == {x.key: 1, y.key: 2}


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 4)
async def test__broadcast_integer(c, s, *workers):
    x, y = await c.scatter([1, 2], broadcast=2)
    assert len(s.tasks[x.key].who_has) == 2
    assert len(s.tasks[y.key].who_has) == 2


@gen_cluster(client=True)
async def test__broadcast_dict(c, s, a, b):
    d = await c.scatter({"x": 1}, broadcast=True)
    assert a.data == b.data == {"x": 1}


def test_broadcast(c, s, a, b):
    x, y = c.scatter([1, 2], broadcast=True)

    has_what = sync(c.loop, c.scheduler.has_what)

    assert {k: set(v) for k, v in has_what.items()} == {
        a["address"]: {x.key, y.key},
        b["address"]: {x.key, y.key},
    }

    [z] = c.scatter([3], broadcast=True, workers=[a["address"]])

    has_what = sync(c.loop, c.scheduler.has_what)
    assert {k: set(v) for k, v in has_what.items()} == {
        a["address"]: {x.key, y.key, z.key},
        b["address"]: {x.key, y.key},
    }


@gen_cluster(client=True)
async def test_proxy(c, s, a, b):
    msg = await c.scheduler.proxy(msg={"op": "identity"}, worker=a.address)
    assert msg["id"] == a.identity()["id"]


@gen_cluster(client=True)
async def test__cancel(c, s, a, b):
    x = c.submit(slowinc, 1)
    y = c.submit(slowinc, x)

    while y.key not in s.tasks:
        await asyncio.sleep(0.01)

    await c.cancel([x])

    assert x.cancelled()
    assert "cancel" in str(x)
    s.validate_state()

    start = time()
    while not y.cancelled():
        await asyncio.sleep(0.01)
        assert time() < start + 5

    assert not s.tasks
    s.validate_state()


@gen_cluster(client=True)
async def test_cancel_tuple_key(c, s, a, b):
    x = c.submit(inc, 1, key=("x", 0, 1))
    await x
    await c.cancel(x)
    with pytest.raises(CancelledError):
        await x
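# --- Illustrative sketch (editor's addition) ---
# Cancelling a future also cancels dependents that become unnecessary, and
# awaiting a cancelled future raises CancelledError, as the cancel tests here
# verify.
def example_cancel(client):
    x = client.submit(slowinc, 1)
    y = client.submit(slowinc, x)  # y depends on x, so it is cancelled too
    client.cancel([x])
    assert x.cancelled()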
@gen_cluster()
async def test_cancel_multi_client(s, a, b):
    c = await Client(s.address, asynchronous=True)
    f = await Client(s.address, asynchronous=True)

    x = c.submit(slowinc, 1)
    y = f.submit(slowinc, 1)

    assert x.key == y.key

    await c.cancel([x])

    assert x.cancelled()
    assert not y.cancelled()

    start = time()
    while y.key not in s.tasks:
        await asyncio.sleep(0.01)
        assert time() < start + 5

    out = await y
    assert out == 2

    with pytest.raises(CancelledError):
        await x

    await c.close()
    await f.close()


@gen_cluster(client=True)
async def test_cancel_collection(c, s, a, b):
    L = c.map(double, [[1], [2], [3]])
    x = db.Bag({("b", i): f for i, f in enumerate(L)}, "b", 3)

    await c.cancel(x)
    await c.cancel([x])
    assert all(f.cancelled() for f in L)

    start = time()
    while s.tasks:
        assert time() < start + 1
        await asyncio.sleep(0.01)


def test_cancel(c):
    x = c.submit(slowinc, 1, key="x")
    y = c.submit(slowinc, x, key="y")
    z = c.submit(slowinc, y, key="z")

    c.cancel([y])

    start = time()
    while not z.cancelled():
        sleep(0.01)
        assert time() < start + 5

    assert x.result() == 2

    z.cancel()
    assert z.cancelled()


@gen_cluster(client=True)
async def test_future_type(c, s, a, b):
    x = c.submit(inc, 1)
    await wait([x])
    assert x.type == int
    assert "int" in str(x)


@gen_cluster(client=True)
async def test_traceback_clean(c, s, a, b):
    x = c.submit(div, 1, 0)
    try:
        await x
    except Exception as e:
        f = e
        exc_type, exc_value, tb = sys.exc_info()
        while tb:
            assert "scheduler" not in tb.tb_frame.f_code.co_filename
            assert "worker" not in tb.tb_frame.f_code.co_filename
            tb = tb.tb_next


@gen_cluster(client=True)
async def test_map_different_lengths(c, s, a, b):
    assert len(c.map(add, [1, 2], [1, 2, 3])) == 2


def test_Future_exception_sync_2(loop, capsys):
    with cluster() as (s, [a, b]):
        with Client(s["address"], loop=loop) as c:
            assert dask.base.get_scheduler() == c.get

    out, err = capsys.readouterr()
    assert len(out.strip().split("\n")) == 1

    assert dask.base.get_scheduler() != c.get


@gen_cluster(timeout=60, client=True)
async def test_async_persist(c, s, a, b):
    from dask.delayed import delayed, Delayed

    x = delayed(1)
    y = delayed(inc)(x)
    z = delayed(dec)(x)
    w = delayed(add)(y, z)

    yy, ww = c.persist([y, w])
    assert type(yy) == type(y)
    assert type(ww) == type(w)
    assert len(yy.dask) == 1
    assert len(ww.dask) == 1
    assert len(w.dask) > 1
    assert y.__dask_keys__() == yy.__dask_keys__()
    assert w.__dask_keys__() == ww.__dask_keys__()

    while y.key not in s.tasks and w.key not in s.tasks:
        await asyncio.sleep(0.01)

    assert s.who_wants[y.key] == {c.id}
    assert s.who_wants[w.key] == {c.id}

    yyf, wwf = c.compute([yy, ww])
    yyy, www = await c.gather([yyf, wwf])
    assert yyy == inc(1)
    assert www == add(inc(1), dec(1))

    assert isinstance(c.persist(y), Delayed)
    assert isinstance(c.persist([y]), (list, tuple))


@gen_cluster(client=True)
async def test__persist(c, s, a, b):
    pytest.importorskip("dask.array")
    import dask.array as da

    x = da.ones((10, 10), chunks=(5, 10))
    y = 2 * (x + 1)
    assert len(y.dask) == 6
    yy = c.persist(y)

    assert len(y.dask) == 6
    assert len(yy.dask) == 2
    assert all(isinstance(v, Future) for v in yy.dask.values())
    assert yy.__dask_keys__() == y.__dask_keys__()

    g, h = c.compute([y, yy])

    gg, hh = await c.gather([g, h])
    assert (gg == hh).all()


def test_persist(c):
    pytest.importorskip("dask.array")
    import dask.array as da

    x = da.ones((10, 10), chunks=(5, 10))
    y = 2 * (x + 1)
    assert len(y.dask) == 6
    yy = c.persist(y)
    assert len(y.dask) == 6
    assert len(yy.dask) == 2
    assert all(isinstance(v, Future) for v in yy.dask.values())
    assert yy.__dask_keys__() == y.__dask_keys__()

    zz = yy.compute()
    z = y.compute()
    assert (zz == z).all()


@gen_cluster(timeout=60, client=True)
async def test_long_traceback(c, s, a, b):
    from distributed.protocol.pickle import dumps

    def deep(n):
        if n == 0:
            1 / 0
        else:
            return deep(n - 1)

    x = c.submit(deep, 200)
    await wait([x])
    assert len(dumps(c.futures[x.key].traceback)) < 10000
    assert isinstance(c.futures[x.key].exception, ZeroDivisionError)
@gen_cluster(client=True)
async def test_wait_on_collections(c, s, a, b):
    L = c.map(double, [[1], [2], [3]])
    x = db.Bag({("b", i): f for i, f in enumerate(L)}, "b", 3)

    await wait(x)
    assert all(f.key in a.data or f.key in b.data for f in L)


@gen_cluster(client=True)
async def test_futures_of_get(c, s, a, b):
    x, y, z = c.map(inc, [1, 2, 3])

    assert set(futures_of(0)) == set()
    assert set(futures_of(x)) == {x}
    assert set(futures_of([x, y, z])) == {x, y, z}
    assert set(futures_of([x, [y], [[z]]])) == {x, y, z}
    assert set(futures_of({"x": x, "y": [y]})) == {x, y}

    b = db.Bag({("b", i): f for i, f in enumerate([x, y, z])}, "b", 3)
    assert set(futures_of(b)) == {x, y, z}

    sg = SubgraphCallable(
        {"x": x, "y": y, "z": z, "out": (add, (add, (add, x, y), z), "in")},
        "out",
        ("in",),
    )
    assert set(futures_of(sg)) == {x, y, z}


def test_futures_of_class():
    da = pytest.importorskip("dask.array")
    assert futures_of([da.Array]) == []


@gen_cluster(client=True)
async def test_futures_of_cancelled_raises(c, s, a, b):
    x = c.submit(inc, 1)
    await c.cancel([x])

    with pytest.raises(CancelledError):
        await x

    with pytest.raises(CancelledError):
        await c.get({"x": (inc, x), "y": (inc, 2)}, ["x", "y"], sync=False)

    with pytest.raises(CancelledError):
        c.submit(inc, x)

    with pytest.raises(CancelledError):
        c.submit(add, 1, y=x)

    with pytest.raises(CancelledError):
        c.map(add, [1], y=x)

    assert "y" not in s.tasks


@pytest.mark.skip
@gen_cluster(nthreads=[("127.0.0.1", 1)], client=True)
async def test_dont_delete_recomputed_results(c, s, w):
    x = c.submit(inc, 1)  # compute first time
    await wait([x])
    x.__del__()  # trigger garbage collection
    await asyncio.sleep(0)
    xx = c.submit(inc, 1)  # compute second time

    start = time()
    while xx.key not in w.data:  # data shows up
        await asyncio.sleep(0.01)
        assert time() < start + 1

    while time() < start + (s.delete_interval + 100) / 1000:  # and stays
        assert xx.key in w.data
        await asyncio.sleep(0.01)


@gen_cluster(nthreads=[], client=True)
async def test_fatally_serialized_input(c, s):
    o = FatallySerializedObject()

    future = c.submit(inc, o)

    while not s.tasks:
        await asyncio.sleep(0.01)


@pytest.mark.skip(reason="Use fast random selection now")
@gen_cluster(client=True)
async def test_balance_tasks_by_stacks(c, s, a, b):
    x = c.submit(inc, 1)
    await wait(x)

    y = c.submit(inc, 2)
    await wait(y)

    assert len(a.data) == len(b.data) == 1


@gen_cluster(client=True)
async def test_run(c, s, a, b):
    results = await c.run(inc, 1)
    assert results == {a.address: 2, b.address: 2}

    results = await c.run(inc, 1, workers=[a.address])
    assert results == {a.address: 2}

    results = await c.run(inc, 1, workers=[])
    assert results == {}


@gen_cluster(client=True)
async def test_run_handles_picklable_data(c, s, a, b):
    futures = c.map(inc, range(10))
    await wait(futures)

    def func():
        return {}, set(), [], (), 1, "hello", b"100"

    results = await c.run_on_scheduler(func)
    assert results == func()

    results = await c.run(func)
    assert results == {w.address: func() for w in [a, b]}


def test_run_sync(c, s, a, b):
    def func(x, y=10):
        return x + y

    result = c.run(func, 1, y=2)
    assert result == {a["address"]: 3, b["address"]: 3}

    result = c.run(func, 1, y=2, workers=[a["address"]])
    assert result == {a["address"]: 3}
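# --- Illustrative sketch (editor's addition) ---
# Client.run executes a function directly on each worker, outside the task
# graph, and returns a dict keyed by worker address, as the run tests here
# show.
def example_run(client):
    versions = client.run(lambda: sys.version_info[:2])
    assert all(isinstance(v, tuple) for v in versions.values())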
@gen_cluster(client=True)
async def test_run_coroutine(c, s, a, b):
    results = await c.run(geninc, 1, delay=0.05)
    assert results == {a.address: 2, b.address: 2}

    results = await c.run(geninc, 1, delay=0.05, workers=[a.address])
    assert results == {a.address: 2}

    results = await c.run(geninc, 1, workers=[])
    assert results == {}

    with pytest.raises(RuntimeError, match="hello"):
        await c.run(throws, 1)

    results = await c.run(asyncinc, 2, delay=0.01)
    assert results == {a.address: 3, b.address: 3}


def test_run_coroutine_sync(c, s, a, b):
    result = c.run(geninc, 2, delay=0.01)
    assert result == {a["address"]: 3, b["address"]: 3}

    result = c.run(geninc, 2, workers=[a["address"]])
    assert result == {a["address"]: 3}

    t1 = time()
    result = c.run(geninc, 2, delay=10, wait=False)
    t2 = time()
    assert result is None
    assert t2 - t1 <= 1.0


def test_run_exception(c):
    def raise_exception(exc_type, exc_msg):
        raise exc_type(exc_msg)

    for exc_type in [ValueError, RuntimeError]:
        with pytest.raises(exc_type, match="informative message"):
            c.run(raise_exception, exc_type, "informative message")


def test_diagnostic_ui(loop):
    with cluster() as (s, [a, b]):
        a_addr = a["address"]
        b_addr = b["address"]
        with Client(s["address"], loop=loop) as c:
            d = c.nthreads()
            assert d == {a_addr: 1, b_addr: 1}

            d = c.nthreads([a_addr])
            assert d == {a_addr: 1}
            d = c.nthreads(a_addr)
            assert d == {a_addr: 1}
            d = c.nthreads(a["address"])
            assert d == {a_addr: 1}

            x = c.submit(inc, 1)
            y = c.submit(inc, 2)
            z = c.submit(inc, 3)
            wait([x, y, z])
            d = c.who_has()
            assert set(d) == {x.key, y.key, z.key}
            assert all(w in [a_addr, b_addr] for v in d.values() for w in v)
            assert all(d.values())

            d = c.who_has([x, y])
            assert set(d) == {x.key, y.key}

            d = c.who_has(x)
            assert set(d) == {x.key}

            d = c.has_what()
            assert set(d) == {a_addr, b_addr}
            assert all(k in [x.key, y.key, z.key] for v in d.values() for k in v)

            d = c.has_what([a_addr])
            assert set(d) == {a_addr}

            d = c.has_what(a_addr)
            assert set(d) == {a_addr}


def test_diagnostic_nbytes_sync(c):
    incs = c.map(inc, [1, 2, 3])
    doubles = c.map(double, [1, 2, 3])
    wait(incs + doubles)

    assert c.nbytes(summary=False) == {k.key: sizeof(1) for k in incs + doubles}
    assert c.nbytes(summary=True) == {"inc": sizeof(1) * 3, "double": sizeof(1) * 3}


@gen_cluster(client=True)
async def test_diagnostic_nbytes(c, s, a, b):
    incs = c.map(inc, [1, 2, 3])
    doubles = c.map(double, [1, 2, 3])
    await wait(incs + doubles)

    assert s.get_nbytes(summary=False) == {k.key: sizeof(1) for k in incs + doubles}
    assert s.get_nbytes(summary=True) == {"inc": sizeof(1) * 3, "double": sizeof(1) * 3}


@gen_test()
async def test_worker_aliases():
    s = await Scheduler(validate=True, port=0)
    a = Worker(s.address, name="alice")
    b = Worker(s.address, name="bob")
    w = Worker(s.address, name=3)
    await asyncio.gather(a, b, w)

    c = await Client(s.address, asynchronous=True)

    L = c.map(inc, range(10), workers="alice")
    future = await c.scatter(123, workers=3)
    await wait(L)
    assert len(a.data) == 10
    assert len(b.data) == 0
    assert dict(w.data) == {future.key: 123}

    for i, alias in enumerate([3, [3], "alice"]):
        result = await c.submit(lambda x: x + 1, i, workers=alias)
        assert result == i + 1

    await c.close()
    await asyncio.gather(a.close(), b.close(), w.close())
    await s.close()


def test_persist_get_sync(c):
    dadd = delayed(add)
    x, y = delayed(1), delayed(2)
    xx = delayed(add)(x, x)
    yy = delayed(add)(y, y)
    xxyy = delayed(add)(xx, yy)

    xxyy2 = c.persist(xxyy)
    xxyy3 = delayed(add)(xxyy2, 10)

    assert xxyy3.compute() == ((1 + 1) + (2 + 2)) + 10


@gen_cluster(client=True)
async def test_persist_get(c, s, a, b):
    dadd = delayed(add)
    x, y = delayed(1), delayed(2)
    xx = delayed(add)(x, x)
    yy = delayed(add)(y, y)
    xxyy = delayed(add)(xx, yy)

    xxyy2 = c.persist(xxyy)
    xxyy3 = delayed(add)(xxyy2, 10)

    await asyncio.sleep(0.5)
    result = await c.gather(c.get(xxyy3.dask, xxyy3.__dask_keys__(), sync=False))
    assert result[0] == ((1 + 1) + (2 + 2)) + 10

    result = await c.compute(xxyy3)
    assert result == ((1 + 1) + (2 + 2)) + 10

    result = await c.compute(xxyy3)
@pytest.mark.skipif(WINDOWS, reason="num_fds not supported on windows")
def test_client_num_fds(loop):
    psutil = pytest.importorskip("psutil")
    with cluster() as (s, [a, b]):
        proc = psutil.Process()
        with Client(s["address"], loop=loop) as c:  # first client to start loop
            before = proc.num_fds()  # measure
            for i in range(4):
                with Client(s["address"], loop=loop):  # start more clients
                    pass
            start = time()
            while proc.num_fds() > before:
                sleep(0.01)
                assert time() < start + 4


@gen_cluster()
async def test_startup_close_startup(s, a, b):
    c = await Client(s.address, asynchronous=True)
    await c.close()

    c = await Client(s.address, asynchronous=True)
    await c.close()


def test_startup_close_startup_sync(loop):
    with cluster() as (s, [a, b]):
        with Client(s["address"], loop=loop) as c:
            sleep(0.1)
        with Client(s["address"]) as c:
            pass
        with Client(s["address"]) as c:
            pass
        sleep(0.1)
        with Client(s["address"]) as c:
            pass


@gen_cluster(client=True)
async def test_badly_serialized_exceptions(c, s, a, b):
    def f():
        class BadlySerializedException(Exception):
            def __reduce__(self):
                raise TypeError()

        raise BadlySerializedException("hello world")

    x = c.submit(f)

    try:
        result = await x
    except Exception as e:
        assert "hello world" in str(e)
    else:
        assert False


@gen_cluster(client=True)
async def test_rebalance(c, s, a, b):
    aws = s.workers[a.address]
    bws = s.workers[b.address]

    x, y = await c.scatter([1, 2], workers=[a.address])
    assert len(a.data) == 2
    assert len(b.data) == 0

    s.validate_state()
    await c.rebalance()
    s.validate_state()

    assert len(b.data) == 1
    assert {ts.key for ts in bws.has_what} == set(b.data)
    assert bws in s.tasks[x.key].who_has or bws in s.tasks[y.key].who_has

    assert len(a.data) == 1
    assert {ts.key for ts in aws.has_what} == set(a.data)
    assert aws not in s.tasks[x.key].who_has or aws not in s.tasks[y.key].who_has


@gen_cluster(nthreads=[("127.0.0.1", 1)] * 4, client=True)
async def test_rebalance_workers(e, s, a, b, c, d):
    w, x, y, z = await e.scatter([1, 2, 3, 4], workers=[a.address])
    assert len(a.data) == 4
    assert len(b.data) == 0
    assert len(c.data) == 0
    assert len(d.data) == 0

    await e.rebalance([x, y], workers=[a.address, c.address])
    assert len(a.data) == 3
    assert len(b.data) == 0
    assert len(c.data) == 1
    assert len(d.data) == 0
    assert c.data == {x.key: 2} or c.data == {y.key: 3}

    await e.rebalance()
    assert len(a.data) == 1
    assert len(b.data) == 1
    assert len(c.data) == 1
    assert len(d.data) == 1
    s.validate_state()


@gen_cluster(client=True)
async def test_rebalance_execution(c, s, a, b):
    futures = c.map(inc, range(10), workers=a.address)
    await c.rebalance(futures)
    assert len(a.data) == len(b.data) == 5
    s.validate_state()


def test_rebalance_sync(c, s, a, b):
    futures = c.map(inc, range(10), workers=[a["address"]])
    c.rebalance(futures)

    has_what = c.has_what()
    assert len(has_what) == 2
    assert list(valmap(len, has_what).values()) == [5, 5]


@gen_cluster(client=True)
async def test_rebalance_unprepared(c, s, a, b):
    futures = c.map(slowinc, range(10), delay=0.05, workers=a.address)
    await asyncio.sleep(0.1)
    await c.rebalance(futures)
    s.validate_state()


@gen_cluster(client=True)
async def test_rebalance_raises_missing_data(c, s, a, b):
    with pytest.raises(ValueError, match="keys were found to be missing"):
        futures = await c.scatter(range(100))
        keys = [f.key for f in futures]
        del futures
        await c.rebalance(keys)


@gen_cluster(client=True)
async def test_receive_lost_key(c, s, a, b):
    x = c.submit(inc, 1, workers=[a.address])
    await x
    await a.close()

    start = time()
    while x.status == "finished":
        assert time() < start + 5
        await asyncio.sleep(0.01)
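
# A minimal sketch (not a test) of ``Client.rebalance``, the API covered by
# the rebalance tests above: move keys between workers so memory is spread
# evenly.  Assumes a running cluster at the hypothetical address below.
def _example_rebalance():
    from distributed import Client, wait

    client = Client("tcp://127.0.0.1:8786")  # hypothetical address
    futures = client.map(lambda x: x + 1, range(100))
    wait(futures)
    client.rebalance(futures)  # spread the 100 results across workers
    client.close()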
@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster([("127.0.0.1", 1), ("127.0.0.2", 2)], client=True)
async def test_unrunnable_task_runs(c, s, a, b):
    x = c.submit(inc, 1, workers=[a.ip])
    await x

    await a.close()
    start = time()
    while x.status == "finished":
        assert time() < start + 5
        await asyncio.sleep(0.01)

    assert s.tasks[x.key] in s.unrunnable
    assert s.get_task_status(keys=[x.key]) == {x.key: "no-worker"}

    w = await Worker(s.address, loop=s.loop)

    start = time()
    while x.status != "finished":
        assert time() < start + 2
        await asyncio.sleep(0.01)

    assert s.tasks[x.key] not in s.unrunnable
    result = await x
    assert result == 2
    await w.close()


@gen_cluster(client=True, nthreads=[])
async def test_add_worker_after_tasks(c, s):
    futures = c.map(inc, range(10))

    n = await Nanny(s.address, nthreads=2, loop=s.loop, port=0)

    await c.gather(futures)

    await n.close()


@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster([("127.0.0.1", 1), ("127.0.0.2", 2)], client=True)
async def test_workers_register_indirect_data(c, s, a, b):
    [x] = await c.scatter([1], workers=a.address)
    y = c.submit(inc, x, workers=b.ip)
    await y

    assert b.data[x.key] == 1
    assert s.tasks[x.key].who_has == {s.workers[a.address], s.workers[b.address]}
    assert s.workers[b.address].has_what == {s.tasks[x.key], s.tasks[y.key]}
    s.validate_state()


@gen_cluster(client=True)
async def test_submit_on_cancelled_future(c, s, a, b):
    x = c.submit(inc, 1)
    await x

    await c.cancel(x)

    with pytest.raises(CancelledError):
        c.submit(inc, x)


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 10)
async def test_replicate(c, s, *workers):
    [a, b] = await c.scatter([1, 2])
    await s.replicate(keys=[a.key, b.key], n=5)
    s.validate_state()

    assert len(s.tasks[a.key].who_has) == 5
    assert len(s.tasks[b.key].who_has) == 5

    assert sum(a.key in w.data for w in workers) == 5
    assert sum(b.key in w.data for w in workers) == 5


@gen_cluster(client=True)
async def test_replicate_tuple_keys(c, s, a, b):
    x = delayed(inc)(1, dask_key_name=("x", 1))
    f = c.persist(x)
    await c.replicate(f, n=5)
    s.validate_state()
    assert a.data and b.data

    await c.rebalance(f)
    s.validate_state()


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 10)
async def test_replicate_workers(c, s, *workers):
    [a, b] = await c.scatter([1, 2], workers=[workers[0].address])
    await s.replicate(
        keys=[a.key, b.key], n=5, workers=[w.address for w in workers[:5]]
    )

    assert len(s.tasks[a.key].who_has) == 5
    assert len(s.tasks[b.key].who_has) == 5

    assert sum(a.key in w.data for w in workers[:5]) == 5
    assert sum(b.key in w.data for w in workers[:5]) == 5
    assert sum(a.key in w.data for w in workers[5:]) == 0
    assert sum(b.key in w.data for w in workers[5:]) == 0

    await s.replicate(keys=[a.key, b.key], n=1)

    assert len(s.tasks[a.key].who_has) == 1
    assert len(s.tasks[b.key].who_has) == 1
    assert sum(a.key in w.data for w in workers) == 1
    assert sum(b.key in w.data for w in workers) == 1

    s.validate_state()

    await s.replicate(keys=[a.key, b.key], n=None)  # all
    assert len(s.tasks[a.key].who_has) == 10
    assert len(s.tasks[b.key].who_has) == 10
    s.validate_state()

    await s.replicate(
        keys=[a.key, b.key], n=1, workers=[w.address for w in workers[:5]]
    )
    assert sum(a.key in w.data for w in workers[:5]) == 1
    assert sum(b.key in w.data for w in workers[:5]) == 1
    assert sum(a.key in w.data for w in workers[5:]) == 5
    assert sum(b.key in w.data for w in workers[5:]) == 5
    s.validate_state()
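
# A minimal sketch (not a test) of ``Client.replicate``, as exercised above:
# copy keys onto several workers for resilience or read-heavy access.  ``n``
# is the target number of copies; the address is a hypothetical placeholder.
def _example_replicate():
    from distributed import Client

    client = Client("tcp://127.0.0.1:8786")  # hypothetical address
    [future] = client.scatter([{"config": 123}])
    client.replicate([future], n=3)  # three workers now hold a copy
    client.close()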
class CountSerialization:
    def __init__(self):
        self.n = 0

    def __setstate__(self, n):
        self.n = n + 1

    def __getstate__(self):
        return self.n


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 10)
async def test_replicate_tree_branching(c, s, *workers):
    obj = CountSerialization()
    [future] = await c.scatter([obj])
    await s.replicate(keys=[future.key], n=10)

    max_count = max(w.data[future.key].n for w in workers)
    assert max_count > 1


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 10)
async def test_client_replicate(c, s, *workers):
    x = c.submit(inc, 1)
    y = c.submit(inc, 2)
    await c.replicate([x, y], n=5)

    assert len(s.tasks[x.key].who_has) == 5
    assert len(s.tasks[y.key].who_has) == 5

    await c.replicate([x, y], n=3)

    assert len(s.tasks[x.key].who_has) == 3
    assert len(s.tasks[y.key].who_has) == 3

    await c.replicate([x, y])
    s.validate_state()

    assert len(s.tasks[x.key].who_has) == 10
    assert len(s.tasks[y.key].who_has) == 10


@pytest.mark.skipif(
    not sys.platform.startswith("linux"), reason="Need 127.0.0.2 to mean localhost"
)
@gen_cluster(
    client=True,
    nthreads=[("127.0.0.1", 1), ("127.0.0.2", 1), ("127.0.0.2", 1)],
    timeout=None,
)
async def test_client_replicate_host(client, s, a, b, c):
    aws = s.workers[a.address]
    bws = s.workers[b.address]
    cws = s.workers[c.address]

    x = client.submit(inc, 1, workers="127.0.0.2")
    await wait([x])
    assert s.tasks[x.key].who_has == {bws} or s.tasks[x.key].who_has == {cws}

    await client.replicate([x], workers=["127.0.0.2"])
    assert s.tasks[x.key].who_has == {bws, cws}

    await client.replicate([x], workers=["127.0.0.1"])
    assert s.tasks[x.key].who_has == {aws, bws, cws}


def test_client_replicate_sync(c):
    x = c.submit(inc, 1)
    y = c.submit(inc, 2)
    c.replicate([x, y], n=2)

    who_has = c.who_has()
    assert len(who_has[x.key]) == len(who_has[y.key]) == 2

    with pytest.raises(ValueError):
        c.replicate([x], n=0)

    assert y.result() == 3


@pytest.mark.skipif(WINDOWS, reason="Windows timer too coarse-grained")
@gen_cluster(client=True, nthreads=[("127.0.0.1", 4)] * 1)
async def test_task_load_adapts_quickly(c, s, a):
    future = c.submit(slowinc, 1, delay=0.2)  # slow
    await wait(future)
    assert 0.15 < s.task_prefixes["slowinc"].duration_average < 0.4

    futures = c.map(slowinc, range(10), delay=0)  # very fast
    await wait(futures)

    assert 0 < s.task_prefixes["slowinc"].duration_average < 0.1


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_even_load_after_fast_functions(c, s, a, b):
    x = c.submit(inc, 1, workers=a.address)  # very fast
    y = c.submit(inc, 2, workers=b.address)  # very fast
    await wait([x, y])

    futures = c.map(inc, range(2, 11))
    await wait(futures)
    assert any(f.key in a.data for f in futures)
    assert any(f.key in b.data for f in futures)

    # assert abs(len(a.data) - len(b.data)) <= 3


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2)
async def test_even_load_on_startup(c, s, a, b):
    x, y = c.map(inc, [1, 2])
    await wait([x, y])
    assert len(a.data) == len(b.data) == 1


@pytest.mark.skip
@gen_cluster(client=True, nthreads=[("127.0.0.1", 2)] * 2)
async def test_contiguous_load(c, s, a, b):
    w, x, y, z = c.map(inc, [1, 2, 3, 4])
    await wait([w, x, y, z])

    groups = [set(a.data), set(b.data)]
    assert {w.key, x.key} in groups
    assert {y.key, z.key} in groups


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 4)
async def test_balanced_with_submit(c, s, *workers):
    L = [c.submit(slowinc, i) for i in range(4)]
    await wait(L)
    for w in workers:
        assert len(w.data) == 1
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 4)
async def test_balanced_with_submit_and_resident_data(c, s, *workers):
    [x] = await c.scatter([10], broadcast=True)
    L = [c.submit(slowinc, x, pure=False) for i in range(4)]
    await wait(L)
    for w in workers:
        assert len(w.data) == 2


@gen_cluster(client=True, nthreads=[("127.0.0.1", 20)] * 2)
async def test_scheduler_saturates_cores(c, s, a, b):
    for delay in [0, 0.01, 0.1]:
        futures = c.map(slowinc, range(100), delay=delay)
        futures = c.map(slowinc, futures, delay=delay / 10)

        while not s.tasks or any(w.processing for w in s.workers.values()):
            if s.tasks:
                assert all(
                    len(p) >= 20
                    for w in s.workers.values()
                    for p in w.processing.values()
                )
            await asyncio.sleep(0.01)


@gen_cluster(client=True, nthreads=[("127.0.0.1", 20)] * 2)
async def test_scheduler_saturates_cores_random(c, s, a, b):
    for delay in [0, 0.01, 0.1]:
        futures = c.map(randominc, range(100), scale=0.1)

        while not s.tasks or any(w.processing for w in s.workers.values()):
            if s.tasks:
                assert all(
                    len(p) >= 20
                    for w in s.workers.values()
                    for p in w.processing.values()
                )
            await asyncio.sleep(0.01)


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 4)
async def test_cancel_clears_processing(c, s, *workers):
    da = pytest.importorskip("dask.array")
    x = c.submit(slowinc, 1, delay=0.2)
    while not s.tasks:
        await asyncio.sleep(0.01)

    await c.cancel(x)

    start = time()
    while any(v for w in s.workers.values() for v in w.processing):
        assert time() < start + 0.2
        await asyncio.sleep(0.01)
    s.validate_state()


def test_default_get():
    with cluster() as (s, [a, b]):
        pre_get = dask.base.get_scheduler()
        pytest.raises(KeyError, dask.config.get, "shuffle")

        with Client(s["address"], set_as_default=True) as c:
            assert dask.base.get_scheduler() == c.get
            assert dask.config.get("shuffle") == "tasks"

        assert dask.base.get_scheduler() == pre_get
        pytest.raises(KeyError, dask.config.get, "shuffle")

        c = Client(s["address"], set_as_default=False)
        assert dask.base.get_scheduler() == pre_get
        pytest.raises(KeyError, dask.config.get, "shuffle")
        c.close()

        c = Client(s["address"], set_as_default=True)
        assert dask.config.get("shuffle") == "tasks"
        assert dask.base.get_scheduler() == c.get
        c.close()
        assert dask.base.get_scheduler() == pre_get
        pytest.raises(KeyError, dask.config.get, "shuffle")

        with Client(s["address"]) as c:
            assert dask.base.get_scheduler() == c.get

        with Client(s["address"], set_as_default=False) as c:
            assert dask.base.get_scheduler() != c.get
        assert dask.base.get_scheduler() != c.get

        with Client(s["address"], set_as_default=True) as c1:
            assert dask.base.get_scheduler() == c1.get

            with Client(s["address"], set_as_default=True) as c2:
                assert dask.base.get_scheduler() == c2.get
            assert dask.base.get_scheduler() == c1.get
        assert dask.base.get_scheduler() == pre_get


@gen_cluster(client=True)
async def test_get_processing(c, s, a, b):
    processing = await c.processing()
    assert processing == valmap(tuple, s.processing)

    futures = c.map(
        slowinc, range(10), delay=0.1, workers=[a.address], allow_other_workers=True
    )

    await asyncio.sleep(0.2)

    x = await c.processing()
    assert set(x) == {a.address, b.address}

    x = await c.processing(workers=[a.address])
    assert isinstance(x[a.address], (list, tuple))


@gen_cluster(client=True)
async def test_get_foo(c, s, a, b):
    futures = c.map(inc, range(10))
    await wait(futures)

    x = await c.scheduler.ncores()
    assert x == s.nthreads

    x = await c.scheduler.ncores(workers=[a.address])
    assert x == {a.address: s.nthreads[a.address]}

    x = await c.scheduler.has_what()
    assert valmap(sorted, x) == valmap(sorted, s.has_what)

    x = await c.scheduler.has_what(workers=[a.address])
    assert valmap(sorted, x) == {a.address: sorted(s.has_what[a.address])}
    x = await c.scheduler.nbytes(summary=False)
    assert x == s.get_nbytes(summary=False)

    x = await c.scheduler.nbytes(keys=[futures[0].key], summary=False)
    assert x == {futures[0].key: s.tasks[futures[0].key].nbytes}

    x = await c.scheduler.who_has()
    assert valmap(sorted, x) == valmap(sorted, s.who_has)

    x = await c.scheduler.who_has(keys=[futures[0].key])
    assert valmap(sorted, x) == {futures[0].key: sorted(s.who_has[futures[0].key])}


def assert_dict_key_equal(expected, actual):
    assert set(expected.keys()) == set(actual.keys())
    for k in actual.keys():
        ev = expected[k]
        av = actual[k]
        assert list(ev) == list(av)


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 3)
async def test_get_foo_lost_keys(c, s, u, v, w):
    x = c.submit(inc, 1, workers=[u.address])
    y = await c.scatter(3, workers=[v.address])
    await wait([x, y])

    ua, va, wa = u.address, v.address, w.address

    d = await c.scheduler.has_what()
    assert_dict_key_equal(d, {ua: [x.key], va: [y.key], wa: []})
    d = await c.scheduler.has_what(workers=[ua, va])
    assert_dict_key_equal(d, {ua: [x.key], va: [y.key]})
    d = await c.scheduler.who_has()
    assert_dict_key_equal(d, {x.key: [ua], y.key: [va]})
    d = await c.scheduler.who_has(keys=[x.key, y.key])
    assert_dict_key_equal(d, {x.key: [ua], y.key: [va]})

    await u.close()
    await v.close()

    d = await c.scheduler.has_what()
    assert_dict_key_equal(d, {wa: []})
    d = await c.scheduler.has_what(workers=[ua, va])
    assert_dict_key_equal(d, {ua: [], va: []})
    # The scattered key cannot be recomputed so it is forgotten
    d = await c.scheduler.who_has()
    assert_dict_key_equal(d, {x.key: []})
    # ... but when passed explicitly, it is included in the result
    d = await c.scheduler.who_has(keys=[x.key, y.key])
    assert_dict_key_equal(d, {x.key: [], y.key: []})


@pytest.mark.slow
@gen_cluster(
    client=True, Worker=Nanny, clean_kwargs={"threads": False, "processes": False}
)
async def test_bad_tasks_fail(c, s, a, b):
    f = c.submit(sys.exit, 0)
    with pytest.raises(KilledWorker) as info:
        await f

    assert info.value.last_worker.nanny in {a.address, b.address}
    await asyncio.gather(a.close(), b.close())


def test_get_processing_sync(c, s, a, b):
    processing = c.processing()
    assert not any(v for v in processing.values())

    futures = c.map(
        slowinc, range(10), delay=0.1, workers=[a["address"]], allow_other_workers=False
    )

    sleep(0.2)

    aa = a["address"]
    bb = b["address"]
    processing = c.processing()

    assert set(c.processing(aa)) == {aa}
    assert set(c.processing([aa])) == {aa}

    c.cancel(futures)


def test_close_idempotent(c):
    c.close()
    c.close()
    c.close()


@nodebug
def test_get_returns_early(c):
    start = time()
    with suppress(RuntimeError):
        result = c.get({"x": (throws, 1), "y": (sleep, 1)}, ["x", "y"])
    assert time() < start + 0.5

    # Futures should be released and forgotten
    wait_for(lambda: not c.futures, timeout=0.1)

    wait_for(lambda: not any(c.processing().values()), timeout=3)

    x = c.submit(inc, 1)
    x.result()

    with suppress(RuntimeError):
        result = c.get({"x": (throws, 1), x.key: (inc, 1)}, ["x", x.key])
    assert x.key in c.futures


@pytest.mark.slow
@gen_cluster(Worker=Nanny, client=True)
async def test_Client_clears_references_after_restart(c, s, a, b):
    x = c.submit(inc, 1)
    assert x.key in c.refcount

    await c.restart()
    assert x.key not in c.refcount

    key = x.key
    del x
    import gc

    gc.collect()
    await asyncio.sleep(0)

    assert key not in c.refcount


def test_get_stops_work_after_error(c):
    with pytest.raises(RuntimeError):
        c.get({"x": (throws, 1), "y": (sleep, 1.5)}, ["x", "y"])

    start = time()
    while any(c.processing().values()):
        sleep(0.01)
        assert time() < start + 0.5
def test_as_completed_list(c):
    seq = c.map(inc, range(5))
    seq2 = list(as_completed(seq))
    assert set(c.gather(seq2)) == {1, 2, 3, 4, 5}


def test_as_completed_results(c):
    seq = c.map(inc, range(5))
    seq2 = list(as_completed(seq, with_results=True))
    assert set(pluck(1, seq2)) == {1, 2, 3, 4, 5}
    assert set(pluck(0, seq2)) == set(seq)


@pytest.mark.parametrize("with_results", [True, False])
def test_as_completed_batches(c, with_results):
    n = 50
    futures = c.map(slowinc, range(n), delay=0.01)
    out = []
    for batch in as_completed(futures, with_results=with_results).batches():
        assert isinstance(batch, (tuple, list))
        sleep(0.05)
        out.extend(batch)

    assert len(out) == n
    if with_results:
        assert set(pluck(1, out)) == set(range(1, n + 1))
    else:
        assert set(out) == set(futures)


def test_as_completed_next_batch(c):
    futures = c.map(slowinc, range(2), delay=0.1)
    ac = as_completed(futures)
    assert not ac.is_empty()
    assert ac.next_batch(block=False) == []
    assert set(ac.next_batch(block=True)).issubset(futures)
    while not ac.is_empty():
        assert set(ac.next_batch(block=True)).issubset(futures)
    assert ac.is_empty()
    assert not ac.has_ready()


@gen_test()
async def test_status():
    s = await Scheduler(port=0)

    c = await Client(s.address, asynchronous=True)
    assert c.status == "running"
    x = c.submit(inc, 1)

    await c.close()
    assert c.status == "closed"

    await s.close()


@gen_cluster(client=True)
async def test_persist_optimize_graph(c, s, a, b):
    i = 10
    for method in [c.persist, c.compute]:
        b = db.range(i, npartitions=2)
        i += 1

        b2 = b.map(inc)
        b3 = b2.map(inc)

        b4 = method(b3, optimize_graph=False)
        await wait(b4)

        assert set(map(tokey, b3.__dask_keys__())).issubset(s.tasks)

        b = db.range(i, npartitions=2)
        i += 1

        b2 = b.map(inc)
        b3 = b2.map(inc)

        b4 = method(b3, optimize_graph=True)
        await wait(b4)

        assert not any(tokey(k) in s.tasks for k in b2.__dask_keys__())


@gen_cluster(client=True, nthreads=[])
async def test_scatter_raises_if_no_workers(c, s):
    with pytest.raises(TimeoutError):
        await c.scatter(1, timeout=0.5)


@pytest.mark.slow
def test_reconnect(loop):
    w = Worker("127.0.0.1", 9393, loop=loop)
    loop.add_callback(w.start)

    scheduler_cli = [
        "dask-scheduler",
        "--host",
        "127.0.0.1",
        "--port",
        "9393",
        "--no-dashboard",
    ]
    with popen(scheduler_cli) as s:
        c = Client("127.0.0.1:9393", loop=loop)
        start = time()
        while len(c.nthreads()) != 1:
            sleep(0.1)
            assert time() < start + 3

        x = c.submit(inc, 1)
        assert x.result() == 2

    start = time()
    while c.status != "connecting":
        assert time() < start + 5
        sleep(0.01)

    assert x.status == "cancelled"
    with pytest.raises(CancelledError):
        x.result()

    with popen(scheduler_cli) as s:
        start = time()
        while c.status != "running":
            sleep(0.1)
            assert time() < start + 5
        start = time()
        while len(c.nthreads()) != 1:
            sleep(0.05)
            assert time() < start + 15

        x = c.submit(inc, 1)
        assert x.result() == 2

    start = time()
    while True:
        try:
            x.result()
            assert False
        except CommClosedError:
            continue
        except CancelledError:
            break
        assert time() < start + 5
        sleep(0.1)

    sync(loop, w.close)
    c.close()


@gen_cluster(client=True, nthreads=[], client_kwargs={"timeout": 0.5})
async def test_reconnect_timeout(c, s):
    with captured_logger(logging.getLogger("distributed.client")) as logger:
        await s.close()
        start = time()
        while c.status != "closed":
            await c._update_scheduler_info()
            await asyncio.sleep(0.05)
            assert time() < start + 5, "Timeout waiting for reconnect to fail"
    text = logger.getvalue()
    assert "Failed to reconnect" in text
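
# A minimal sketch (not a test) of ``as_completed`` with results, mirroring
# the as_completed tests earlier in this file.  The address is a hypothetical
# placeholder.
def _example_as_completed():
    from distributed import Client, as_completed

    client = Client("tcp://127.0.0.1:8786")  # hypothetical address
    futures = client.map(lambda x: x ** 2, range(10))
    for future, result in as_completed(futures, with_results=True):
        print(future.key, result)  # results arrive in completion order
    client.close()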
@pytest.mark.slow
@pytest.mark.skipif(WINDOWS, reason="num_fds not supported on windows")
@pytest.mark.skipif(sys.version_info < (3, 7), reason="TODO: intermittent failures")
@pytest.mark.parametrize("worker,count,repeat", [(Worker, 100, 5), (Nanny, 10, 20)])
def test_open_close_many_workers(loop, worker, count, repeat):
    psutil = pytest.importorskip("psutil")
    proc = psutil.Process()

    with cluster(nworkers=0, active_rpc_timeout=2) as (s, _):
        gc.collect()
        before = proc.num_fds()
        done = Semaphore(0)
        running = weakref.WeakKeyDictionary()
        workers = set()
        status = True

        async def start_worker(sleep, duration, repeat=1):
            for i in range(repeat):
                await asyncio.sleep(sleep)
                if not status:
                    return
                w = worker(s["address"], loop=loop)
                running[w] = None
                await w
                workers.add(w)
                addr = w.worker_address
                running[w] = addr
                await asyncio.sleep(duration)
                await w.close()
                del w
                await asyncio.sleep(0)
            done.release()

        for i in range(count):
            loop.add_callback(
                start_worker, random.random() / 5, random.random() / 5, repeat=repeat
            )

        with Client(s["address"], loop=loop) as c:
            sleep(1)

            for i in range(count):
                done.acquire(timeout=5)
                gc.collect()
                if not running:
                    break

            start = time()
            while c.nthreads():
                sleep(0.2)
                assert time() < start + 10

            while len(workers) < count * repeat:
                sleep(0.2)

            status = False

            [c.sync(w.close) for w in list(workers)]
            for w in workers:
                assert w.status == Status.closed

    start = time()
    while proc.num_fds() > before:
        print("fds:", before, proc.num_fds())
        sleep(0.1)
        if time() > start + 10:
            if worker == Worker:  # this is an esoteric case
                print("File descriptors did not clean up")
                break
            else:
                raise ValueError("File descriptors did not clean up")


@gen_cluster(client=False, timeout=None)
async def test_idempotence(s, a, b):
    c = await Client(s.address, asynchronous=True)
    f = await Client(s.address, asynchronous=True)

    # Submit
    x = c.submit(inc, 1)
    await x
    log = list(s.transition_log)

    len_single_submit = len(log)  # see last assert

    y = f.submit(inc, 1)
    assert x.key == y.key
    await y
    await asyncio.sleep(0.1)
    log2 = list(s.transition_log)
    assert log == log2

    # Error
    a = c.submit(div, 1, 0)
    await wait(a)
    assert a.status == "error"
    log = list(s.transition_log)

    b = f.submit(div, 1, 0)
    assert a.key == b.key
    await wait(b)
    await asyncio.sleep(0.1)
    log2 = list(s.transition_log)
    assert log == log2

    s.transition_log.clear()
    # Simultaneous Submit
    d = c.submit(inc, 2)
    e = c.submit(inc, 2)
    await wait([d, e])

    assert len(s.transition_log) == len_single_submit

    await c.close()
    await f.close()


def test_scheduler_info(c):
    info = c.scheduler_info()
    assert isinstance(info, dict)
    assert len(info["workers"]) == 2


def test_write_scheduler_file(c):
    info = c.scheduler_info()
    with tmpfile("json") as scheduler_file:
        c.write_scheduler_file(scheduler_file)
        with Client(scheduler_file=scheduler_file) as c2:
            info2 = c2.scheduler_info()
            assert c.scheduler.address == c2.scheduler.address

        # test that a ValueError is raised if the scheduler_file
        # attribute is already set
        with pytest.raises(ValueError):
            c.write_scheduler_file(scheduler_file)


def test_get_versions(c):
    requests = pytest.importorskip("requests")

    v = c.get_versions()
    assert v["scheduler"] is not None
    assert v["client"] is not None
    assert len(v["workers"]) == 2
    for k, v in v["workers"].items():
        assert v is not None

    c.get_versions(check=True)
    # smoke test for versions; that this does not raise

    v = c.get_versions(packages=["requests"])
    assert v["client"]["packages"]["requests"] == requests.__version__


@gen_cluster(client=True)
async def test_async_get_versions(c, s, a, b):
    await c.get_versions(check=True)


def test_threaded_get_within_distributed(c):
    import dask.multiprocessing

    for get in [dask.local.get_sync, dask.multiprocessing.get, dask.threaded.get]:

        def f():
            return get({"x": (lambda: 1,)}, "x")

        future = c.submit(f)
        assert future.result() == 1
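
# A minimal sketch (not a test) of ``write_scheduler_file``, mirroring
# test_write_scheduler_file above: persist the scheduler contact info so
# another process can connect without knowing the address.  Both the address
# and the file path are hypothetical placeholders.
def _example_scheduler_file():
    from distributed import Client

    client = Client("tcp://127.0.0.1:8786")  # hypothetical address
    client.write_scheduler_file("scheduler.json")  # hypothetical path
    other = Client(scheduler_file="scheduler.json")  # connects via the file
    other.close()
    client.close()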
@gen_cluster(client=True)
async def test_lose_scattered_data(c, s, a, b):
    [x] = await c.scatter([1], workers=a.address)

    await a.close()
    await asyncio.sleep(0.1)

    assert x.status == "cancelled"
    assert x.key not in s.tasks


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 3)
async def test_partially_lose_scattered_data(e, s, a, b, c):
    x = await e.scatter(1, workers=a.address)
    await e.replicate(x, n=2)

    await a.close()
    await asyncio.sleep(0.1)

    assert x.status == "finished"
    assert s.get_task_status(keys=[x.key]) == {x.key: "memory"}


@gen_cluster(client=True)
async def test_scatter_compute_lose(c, s, a, b):
    [x] = await c.scatter([[1, 2, 3, 4]], workers=a.address)
    y = c.submit(inc, 1, workers=b.address)

    z = c.submit(slowadd, x, y, delay=0.2)
    await asyncio.sleep(0.1)

    await a.close()

    with pytest.raises(CancelledError):
        await wait(z)

    assert x.status == "cancelled"
    assert y.status == "finished"
    assert z.status == "cancelled"


@gen_cluster(client=True)
async def test_scatter_compute_store_lose(c, s, a, b):
    """
    Create irreplaceable data on one machine,
    cause a dependent computation to occur on another and complete

    Kill the machine with the irreplaceable data.  What happens to the complete
    result?  How about after it GCs and tries to come back?
    """
    x = await c.scatter(1, workers=a.address)
    xx = c.submit(inc, x, workers=a.address)
    y = c.submit(inc, 1)

    z = c.submit(slowadd, xx, y, delay=0.2, workers=b.address)
    await wait(z)

    await a.close()

    start = time()
    while x.status == "finished":
        await asyncio.sleep(0.01)
        assert time() < start + 2

    # assert xx.status == 'finished'
    assert y.status == "finished"
    assert z.status == "finished"

    zz = c.submit(inc, z)
    await wait(zz)

    zkey = z.key
    del z

    start = time()
    while s.get_task_status(keys=[zkey]) != {zkey: "released"}:
        await asyncio.sleep(0.01)
        assert time() < start + 2

    xxkey = xx.key
    del xx

    start = time()
    while x.key in s.tasks and zkey not in s.tasks and xxkey not in s.tasks:
        await asyncio.sleep(0.01)
        assert time() < start + 2
""" [x] = await c.scatter([1], workers=a.address) y = c.submit(slowinc, x, delay=0.2) z = c.submit(inc, y) await asyncio.sleep(0.1) await a.close() start = time() while x.status == "finished": await asyncio.sleep(0.01) assert time() < start + 2 assert y.status == "cancelled" assert z.status == "cancelled" @gen_cluster(client=False) async def test_serialize_future(s, a, b): c1 = await Client(s.address, asynchronous=True) c2 = await Client(s.address, asynchronous=True) future = c1.submit(lambda: 1) result = await future for ci in (c1, c2): for ctxman in ci.as_current, lambda: temp_default_client(ci): with ctxman(): future2 = pickle.loads(pickle.dumps(future)) assert future2.client is ci assert tokey(future2.key) in ci.futures result2 = await future2 assert result == result2 await c1.close() await c2.close() @gen_cluster(client=False) async def test_temp_default_client(s, a, b): c1 = await Client(s.address, asynchronous=True) c2 = await Client(s.address, asynchronous=True) with temp_default_client(c1): assert default_client() is c1 assert default_client(c2) is c2 with temp_default_client(c2): assert default_client() is c2 assert default_client(c1) is c1 await c1.close() await c2.close() @gen_cluster(client=True) async def test_as_current(c, s, a, b): c1 = await Client(s.address, asynchronous=True) c2 = await Client(s.address, asynchronous=True) with temp_default_client(c): assert Client.current() is c with pytest.raises(ValueError): Client.current(allow_global=False) with c1.as_current(): assert Client.current() is c1 assert Client.current(allow_global=True) is c1 with c2.as_current(): assert Client.current() is c2 assert Client.current(allow_global=True) is c2 await c1.close() await c2.close() def test_as_current_is_thread_local(s): l1 = threading.Lock() l2 = threading.Lock() l3 = threading.Lock() l4 = threading.Lock() l1.acquire() l2.acquire() l3.acquire() l4.acquire() def run1(): with Client(s.address) as c: with c.as_current(): l1.acquire() l2.release() try: # This line runs only when both run1 and run2 are inside the # context manager assert Client.current(allow_global=False) is c finally: l3.acquire() l4.release() def run2(): with Client(s.address) as c: with c.as_current(): l1.release() l2.acquire() try: # This line runs only when both run1 and run2 are inside the # context manager assert Client.current(allow_global=False) is c finally: l3.release() l4.acquire() t1 = threading.Thread(target=run1) t2 = threading.Thread(target=run2) t1.start() t2.start() t1.join() t2.join() @pytest.mark.xfail( sys.version_info < (3, 7), reason="Python 3.6 contextvars are not copied on Task creation", ) @gen_cluster(client=False) async def test_as_current_is_task_local(s, a, b): l1 = asyncio.Lock() l2 = asyncio.Lock() l3 = asyncio.Lock() l4 = asyncio.Lock() await l1.acquire() await l2.acquire() await l3.acquire() await l4.acquire() async def run1(): async with Client(s.address, asynchronous=True) as c: with c.as_current(): await l1.acquire() l2.release() try: # This line runs only when both run1 and run2 are inside the # context manager assert Client.current(allow_global=False) is c finally: await l3.acquire() l4.release() async def run2(): async with Client(s.address, asynchronous=True) as c: with c.as_current(): l1.release() await l2.acquire() try: # This line runs only when both run1 and run2 are inside the # context manager assert Client.current(allow_global=False) is c finally: l3.release() await l4.acquire() await asyncio.gather(run1(), run2()) @nodebug # test timing is fragile 
@nodebug  # test timing is fragile
@gen_cluster(nthreads=[("127.0.0.1", 1)] * 3, client=True)
async def test_persist_workers(e, s, a, b, c):
    L1 = [delayed(inc)(i) for i in range(4)]
    total = delayed(sum)(L1)
    L2 = [delayed(add)(i, total) for i in L1]
    total2 = delayed(sum)(L2)

    out = e.persist(
        L1 + L2 + [total, total2],
        workers={
            tuple(L1): a.address,
            total: b.address,
            tuple(L2): [c.address],
            total2: b.address,
        },
        allow_other_workers=L2 + [total2],
    )

    await wait(out)
    assert all(v.key in a.data for v in L1)
    assert total.key in b.data

    assert s.loose_restrictions == {total2.key} | {v.key for v in L2}


@gen_cluster(nthreads=[("127.0.0.1", 1)] * 3, client=True)
async def test_compute_workers(e, s, a, b, c):
    L1 = [delayed(inc)(i) for i in range(4)]
    total = delayed(sum)(L1)
    L2 = [delayed(add)(i, total) for i in L1]

    out = e.compute(
        L1 + L2 + [total],
        workers={tuple(L1): a.address, total: b.address, tuple(L2): [c.address]},
        allow_other_workers=L1 + [total],
    )

    await wait(out)
    for v in L1:
        assert s.worker_restrictions[v.key] == {a.address}
    for v in L2:
        assert s.worker_restrictions[v.key] == {c.address}
    assert s.worker_restrictions[total.key] == {b.address}

    assert s.loose_restrictions == {total.key} | {v.key for v in L1}


@gen_cluster(client=True)
async def test_compute_nested_containers(c, s, a, b):
    da = pytest.importorskip("dask.array")
    np = pytest.importorskip("numpy")
    x = da.ones(10, chunks=(5,)) + 1

    future = c.compute({"x": [x], "y": 123})
    result = await future

    assert isinstance(result, dict)
    assert (result["x"][0] == np.ones(10) + 1).all()
    assert result["y"] == 123


def test_get_restrictions():
    L1 = [delayed(inc)(i) for i in range(4)]
    total = delayed(sum)(L1)
    L2 = [delayed(add)(i, total) for i in L1]

    r1, loose = Client.get_restrictions(L2, "127.0.0.1", False)
    assert r1 == {d.key: ["127.0.0.1"] for d in L2}
    assert not loose

    r1, loose = Client.get_restrictions(L2, ["127.0.0.1"], True)
    assert r1 == {d.key: ["127.0.0.1"] for d in L2}
    assert set(loose) == {d.key for d in L2}

    r1, loose = Client.get_restrictions(L2, {total: "127.0.0.1"}, True)
    assert r1 == {total.key: ["127.0.0.1"]}
    assert loose == [total.key]

    r1, loose = Client.get_restrictions(L2, {(total,): "127.0.0.1"}, True)
    assert r1 == {total.key: ["127.0.0.1"]}
    assert loose == [total.key]


@gen_cluster(client=True)
async def test_scatter_type(c, s, a, b):
    [future] = await c.scatter([1])
    assert future.type == int

    d = await c.scatter({"x": 1.0})
    assert d["x"].type == float


@gen_cluster(client=True)
async def test_retire_workers_2(c, s, a, b):
    [x] = await c.scatter([1], workers=a.address)

    await s.retire_workers(workers=[a.address])
    assert b.data == {x.key: 1}
    assert s.who_has == {x.key: {b.address}}
    assert s.has_what == {b.address: {x.key}}

    assert a.address not in s.workers


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 10)
async def test_retire_many_workers(c, s, *workers):
    futures = await c.scatter(list(range(100)))

    await s.retire_workers(workers=[w.address for w in workers[:7]])

    results = await c.gather(futures)
    assert results == list(range(100))

    while len(s.workers) != 3:
        await asyncio.sleep(0.01)

    assert len(s.has_what) == len(s.nthreads) == 3

    assert all(future.done() for future in futures)
    assert all(s.tasks[future.key].state == "memory" for future in futures)
    for w, keys in s.has_what.items():
        assert 15 < len(keys) < 50


@gen_cluster(
    client=True,
    nthreads=[("127.0.0.1", 3)] * 2,
    config={"distributed.scheduler.default-task-durations": {"f": "10ms"}},
)
async def test_weight_occupancy_against_data_movement(c, s, a, b):
    s.extensions["stealing"]._pc.callback_time = 1000000

    def f(x, y=0, z=0):
        sleep(0.01)
        return x

    y = await c.scatter([[1, 2, 3, 4]], workers=[a.address])
    z = await c.scatter([1], workers=[b.address])

    futures = c.map(f, [1, 2, 3, 4], y=y, z=z)

    await wait(futures)

    assert sum(f.key in a.data for f in futures) >= 2
    assert sum(f.key in b.data for f in futures) >= 1
@gen_cluster(
    client=True,
    nthreads=[("127.0.0.1", 1), ("127.0.0.1", 10)],
    config={"distributed.scheduler.default-task-durations": {"f": "10ms"}},
)
async def test_distribute_tasks_by_nthreads(c, s, a, b):
    s.extensions["stealing"]._pc.callback_time = 1000000

    def f(x, y=0):
        sleep(0.01)
        return x

    y = await c.scatter([1], broadcast=True)

    futures = c.map(f, range(20), y=y)

    await wait(futures)

    assert len(b.data) > 2 * len(a.data)


@gen_cluster(client=True, clean_kwargs={"threads": False})
async def test_add_done_callback(c, s, a, b):
    S = set()

    def f(future):
        future.add_done_callback(g)

    def g(future):
        S.add((future.key, future.status))

    u = c.submit(inc, 1, key="u")
    v = c.submit(throws, "hello", key="v")
    w = c.submit(slowinc, 2, delay=0.3, key="w")
    x = c.submit(inc, 3, key="x")
    u.add_done_callback(f)
    v.add_done_callback(f)
    w.add_done_callback(f)

    await wait((u, v, w, x))

    x.add_done_callback(f)

    t = time()
    while len(S) < 4 and time() - t < 2.0:
        await asyncio.sleep(0.01)

    assert S == {(f.key, f.status) for f in (u, v, w, x)}


@gen_cluster(client=True)
async def test_normalize_collection(c, s, a, b):
    x = delayed(inc)(1)
    y = delayed(inc)(x)
    z = delayed(inc)(y)

    yy = c.persist(y)

    zz = c.normalize_collection(z)
    assert len(z.dask) == len(y.dask) + 1

    assert isinstance(zz.dask[y.key], Future)
    assert len(zz.dask) < len(z.dask)


@gen_cluster(client=True)
async def test_normalize_collection_dask_array(c, s, a, b):
    da = pytest.importorskip("dask.array")

    x = da.ones(10, chunks=(5,))
    y = x + 1
    yy = c.persist(y)

    z = y.sum()
    zdsk = dict(z.dask)
    zz = c.normalize_collection(z)
    assert z.dask == zdsk  # do not mutate input

    assert len(z.dask) > len(zz.dask)
    assert any(isinstance(v, Future) for v in zz.dask.values())

    for k, v in yy.dask.items():
        assert zz.dask[k].key == v.key

    result1 = await c.compute(z)
    result2 = await c.compute(zz)
    assert result1 == result2


@pytest.mark.slow
def test_normalize_collection_with_released_futures(c):
    da = pytest.importorskip("dask.array")

    x = da.arange(2 ** 20, chunks=2 ** 10)
    y = x.persist()
    wait(y)
    sol = y.sum().compute()

    # Start releasing futures
    del y
    # Try to reuse futures. Previously this was a race condition,
    # and the call to `.compute()` would error out due to missing
    # futures on the scheduler at compute time.
    normalized = c.normalize_collection(x)
    res = normalized.sum().compute()
    assert res == sol
@gen_cluster(client=True)
async def test_auto_normalize_collection(c, s, a, b):
    da = pytest.importorskip("dask.array")

    x = da.ones(10, chunks=5)
    assert len(x.dask) == 2

    with dask.config.set(optimizations=[c._optimize_insert_futures]):
        y = x.map_blocks(slowinc, delay=1, dtype=x.dtype)
        yy = c.persist(y)

        await wait(yy)

        start = time()
        future = c.compute(y.sum())
        await future
        end = time()
        assert end - start < 1

        start = time()
        z = c.persist(y + 1)
        await wait(z)
        end = time()
        assert end - start < 1


def test_auto_normalize_collection_sync(c):
    da = pytest.importorskip("dask.array")
    x = da.ones(10, chunks=5)

    y = x.map_blocks(slowinc, delay=1, dtype=x.dtype)
    yy = c.persist(y)
    wait(yy)

    with dask.config.set(optimizations=[c._optimize_insert_futures]):
        start = time()
        y.sum().compute()
        end = time()
        assert end - start < 1


def assert_no_data_loss(scheduler):
    for key, start, finish, recommendations, _ in scheduler.transition_log:
        if start == "memory" and finish == "released":
            for k, v in recommendations.items():
                assert not (k == key and v == "waiting")


@gen_cluster(client=True, timeout=None)
async def test_interleave_computations(c, s, a, b):
    import distributed

    distributed.g = s
    xs = [delayed(slowinc)(i, delay=0.02) for i in range(30)]
    ys = [delayed(slowdec)(x, delay=0.02) for x in xs]
    zs = [delayed(slowadd)(x, y, delay=0.02) for x, y in zip(xs, ys)]

    total = delayed(sum)(zs)

    future = c.compute(total)

    done = ("memory", "released")

    await asyncio.sleep(0.1)

    x_keys = [x.key for x in xs]
    y_keys = [y.key for y in ys]
    z_keys = [z.key for z in zs]

    while not s.tasks or any(w.processing for w in s.workers.values()):
        await asyncio.sleep(0.05)
        x_done = sum(state in done for state in s.get_task_status(keys=x_keys).values())
        y_done = sum(state in done for state in s.get_task_status(keys=y_keys).values())
        z_done = sum(state in done for state in s.get_task_status(keys=z_keys).values())

        assert x_done >= y_done >= z_done
        assert x_done < y_done + 10
        assert y_done < z_done + 10

    assert_no_data_loss(s)


@pytest.mark.skip(reason="Now prefer first-in-first-out")
@gen_cluster(client=True, timeout=None)
async def test_interleave_computations_map(c, s, a, b):
    xs = c.map(slowinc, range(30), delay=0.02)
    ys = c.map(slowdec, xs, delay=0.02)
    zs = c.map(slowadd, xs, ys, delay=0.02)

    done = ("memory", "released")

    x_keys = [x.key for x in xs]
    y_keys = [y.key for y in ys]
    z_keys = [z.key for z in zs]

    while not s.tasks or any(w.processing for w in s.workers.values()):
        await asyncio.sleep(0.05)
        x_done = sum(state in done for state in s.get_task_status(keys=x_keys).values())
        y_done = sum(state in done for state in s.get_task_status(keys=y_keys).values())
        z_done = sum(state in done for state in s.get_task_status(keys=z_keys).values())

        assert x_done >= y_done >= z_done
        assert x_done < y_done + 10
        assert y_done < z_done + 10


@gen_cluster(client=True)
async def test_scatter_dict_workers(c, s, a, b):
    await c.scatter({"a": 10}, workers=[a.address, b.address])
    assert "a" in a.data or "a" in b.data


@pytest.mark.slow
@gen_test()
async def test_client_timeout():
    c = Client("127.0.0.1:57484", asynchronous=True)

    s = Scheduler(loop=c.loop, port=57484)
    await asyncio.sleep(4)
    try:
        await s
    except EnvironmentError:  # port in use
        await c.close()
        return

    start = time()
    await c
    try:
        assert time() < start + 2
    finally:
        await c.close()
        await s.close()


@gen_cluster(client=True)
async def test_submit_list_kwargs(c, s, a, b):
    futures = await c.scatter([1, 2, 3])

    def f(L=None):
        return sum(L)

    future = c.submit(f, L=futures)
    result = await future
    assert result == 1 + 2 + 3
@gen_cluster(client=True)
async def test_map_list_kwargs(c, s, a, b):
    futures = await c.scatter([1, 2, 3])

    def f(i, L=None):
        return i + sum(L)

    futures = c.map(f, range(10), L=futures)
    results = await c.gather(futures)
    assert results == [i + 6 for i in range(10)]


@gen_cluster(client=True)
async def test_dont_clear_waiting_data(c, s, a, b):
    start = time()
    x = await c.scatter(1)
    y = c.submit(slowinc, x, delay=0.5)
    while y.key not in s.tasks:
        await asyncio.sleep(0.01)
    key = x.key
    del x
    for i in range(5):
        assert s.waiting_data[key]
        await asyncio.sleep(0)


@gen_cluster(client=True)
async def test_get_future_error_simple(c, s, a, b):
    f = c.submit(div, 1, 0)
    await wait(f)
    assert f.status == "error"

    function, args, kwargs, deps = await c._get_futures_error(f)
    # args contains only solid values, not keys
    assert function.__name__ == "div"
    with pytest.raises(ZeroDivisionError):
        function(*args, **kwargs)


@gen_cluster(client=True)
async def test_get_futures_error(c, s, a, b):
    x0 = delayed(dec)(2, dask_key_name="x0")
    y0 = delayed(dec)(1, dask_key_name="y0")
    x = delayed(div)(1, x0, dask_key_name="x")
    y = delayed(div)(1, y0, dask_key_name="y")
    tot = delayed(sum)(x, y, dask_key_name="tot")

    f = c.compute(tot)
    await wait(f)
    assert f.status == "error"

    function, args, kwargs, deps = await c._get_futures_error(f)
    assert function.__name__ == "div"
    assert args == (1, y0.key)


@gen_cluster(client=True)
async def test_recreate_error_delayed(c, s, a, b):
    x0 = delayed(dec)(2)
    y0 = delayed(dec)(1)
    x = delayed(div)(1, x0)
    y = delayed(div)(1, y0)
    tot = delayed(sum)(x, y)

    f = c.compute(tot)

    assert f.status == "pending"

    function, args, kwargs = await c._recreate_error_locally(f)
    assert f.status == "error"
    assert function.__name__ == "div"
    assert args == (1, 0)
    with pytest.raises(ZeroDivisionError):
        function(*args, **kwargs)


@gen_cluster(client=True)
async def test_recreate_error_futures(c, s, a, b):
    x0 = c.submit(dec, 2)
    y0 = c.submit(dec, 1)
    x = c.submit(div, 1, x0)
    y = c.submit(div, 1, y0)
    tot = c.submit(sum, x, y)
    f = c.compute(tot)

    assert f.status == "pending"

    function, args, kwargs = await c._recreate_error_locally(f)
    assert f.status == "error"
    assert function.__name__ == "div"
    assert args == (1, 0)
    with pytest.raises(ZeroDivisionError):
        function(*args, **kwargs)


@gen_cluster(client=True)
async def test_recreate_error_collection(c, s, a, b):
    b = db.range(10, npartitions=4)
    b = b.map(lambda x: 1 / x)
    b = b.persist()
    f = c.compute(b)

    function, args, kwargs = await c._recreate_error_locally(f)
    with pytest.raises(ZeroDivisionError):
        function(*args, **kwargs)

    dd = pytest.importorskip("dask.dataframe")
    import pandas as pd

    df = dd.from_pandas(pd.DataFrame({"a": [0, 1, 2, 3, 4]}), chunksize=2)

    def make_err(x):
        # because pandas would happily work with NaN
        if x == 0:
            raise ValueError
        return x

    df2 = df.a.map(make_err)
    f = c.compute(df2)
    function, args, kwargs = await c._recreate_error_locally(f)
    with pytest.raises(ValueError):
        function(*args, **kwargs)

    # with persist
    df3 = c.persist(df2)
    function, args, kwargs = await c._recreate_error_locally(df3)
    with pytest.raises(ValueError):
        function(*args, **kwargs)


@gen_cluster(client=True)
async def test_recreate_error_array(c, s, a, b):
    da = pytest.importorskip("dask.array")
    pytest.importorskip("scipy")
    z = (da.linalg.inv(da.zeros((10, 10), chunks=10)) + 1).sum()
    zz = z.persist()
    func, args, kwargs = await c._recreate_error_locally(zz)
    assert "0.,0.,0." in str(args).replace(" ", "")  # args contain actual arrays
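
# A minimal sketch (not a test) of ``Client.recreate_error_locally``, the API
# covered by the tests above: re-run the failing task in the local process so
# a debugger can step into it.  The address is a hypothetical placeholder.
def _example_recreate_error():
    from distributed import Client

    client = Client("tcp://127.0.0.1:8786")  # hypothetical address
    future = client.submit(lambda: 1 / 0)
    try:
        client.recreate_error_locally(future)  # raises ZeroDivisionError here
    except ZeroDivisionError:
        pass
    client.close()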
def test_recreate_error_sync(c):
    x0 = c.submit(dec, 2)
    y0 = c.submit(dec, 1)
    x = c.submit(div, 1, x0)
    y = c.submit(div, 1, y0)
    tot = c.submit(sum, x, y)
    f = c.compute(tot)

    with pytest.raises(ZeroDivisionError):
        c.recreate_error_locally(f)
    assert f.status == "error"


def test_recreate_error_not_error(c):
    f = c.submit(dec, 2)
    with pytest.raises(ValueError, match="No errored futures passed"):
        c.recreate_error_locally(f)


@gen_cluster(client=True)
async def test_retire_workers(c, s, a, b):
    assert set(s.workers) == {a.address, b.address}

    await c.retire_workers(workers=[a.address], close_workers=True)
    assert set(s.workers) == {b.address}

    start = time()
    while a.status != Status.closed:
        await asyncio.sleep(0.01)
        assert time() < start + 5


class MyException(Exception):
    pass


@gen_cluster(client=True)
async def test_robust_unserializable(c, s, a, b):
    class Foo:
        def __getstate__(self):
            raise MyException()

    with pytest.raises(MyException):
        future = c.submit(identity, Foo())

    futures = c.map(inc, range(10))
    results = await c.gather(futures)

    assert results == list(map(inc, range(10)))
    assert a.data and b.data


@gen_cluster(client=True)
async def test_robust_undeserializable(c, s, a, b):
    class Foo:
        def __getstate__(self):
            return 1

        def __setstate__(self, state):
            raise MyException("hello")

    future = c.submit(identity, Foo())
    with pytest.raises(MyException):
        await future

    futures = c.map(inc, range(10))
    results = await c.gather(futures)

    assert results == list(map(inc, range(10)))
    assert a.data and b.data


@gen_cluster(client=True)
async def test_robust_undeserializable_function(c, s, a, b):
    class Foo:
        def __getstate__(self):
            return 1

        def __setstate__(self, state):
            raise MyException("hello")

        def __call__(self, *args):
            return 1

    future = c.submit(Foo(), 1)
    with pytest.raises(MyException):
        await future

    futures = c.map(inc, range(10))
    results = await c.gather(futures)

    assert results == list(map(inc, range(10)))
    assert a.data and b.data


@gen_cluster(client=True)
async def test_fire_and_forget(c, s, a, b):
    future = c.submit(slowinc, 1, delay=0.1)
    import distributed

    def f(x):
        distributed.foo = 123

    try:
        fire_and_forget(c.submit(f, future))

        start = time()
        while not hasattr(distributed, "foo"):
            await asyncio.sleep(0.01)
            assert time() < start + 2
        assert distributed.foo == 123
    finally:
        del distributed.foo

    start = time()
    while len(s.tasks) > 1:
        await asyncio.sleep(0.01)
        assert time() < start + 2

    assert set(s.who_wants) == {future.key}
    assert set(s.tasks) == {future.key}


@gen_cluster(client=True)
async def test_fire_and_forget_err(c, s, a, b):
    fire_and_forget(c.submit(div, 1, 0))
    await asyncio.sleep(0.1)

    # erred task should clear out quickly
    start = time()
    while s.tasks:
        await asyncio.sleep(0.01)
        assert time() < start + 1


def test_quiet_client_close(loop):
    with captured_logger(logging.getLogger("distributed")) as logger:
        with Client(loop=loop, processes=False, threads_per_worker=4) as c:
            futures = c.map(slowinc, range(1000), delay=0.01)
            sleep(0.200)  # stop part-way
        sleep(0.1)  # let things settle

        out = logger.getvalue()
        lines = out.strip().split("\n")
        assert len(lines) <= 2
        for line in lines:
            assert (
                not line
                or "Reconnecting" in line
                or "garbage" in line
                or set(line) == {"-"}
            ), line


@pytest.mark.slow
def test_quiet_client_close_when_cluster_is_closed_before_client(loop):
    with captured_logger(logging.getLogger("tornado.application")) as logger:
        cluster = LocalCluster(loop=loop, n_workers=1, dashboard_address=":0")
        client = Client(cluster, loop=loop)
        cluster.close()
        client.close()

    out = logger.getvalue()
    assert "CancelledError" not in out
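
# A minimal sketch (not a test) of ``fire_and_forget``, exercised by the
# tests above: keep a task alive on the cluster even after its future is
# garbage-collected on the client.  The address is a hypothetical placeholder.
def _example_fire_and_forget():
    from distributed import Client, fire_and_forget

    client = Client("tcp://127.0.0.1:8786")  # hypothetical address
    future = client.submit(print, "side effect on a worker")
    fire_and_forget(future)  # the scheduler keeps the task without a holder
    del future  # safe: the task still runs to completion
    client.close()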
@gen_cluster()
async def test_close(s, a, b):
    c = await Client(s.address, asynchronous=True)
    future = c.submit(inc, 1)
    await wait(future)
    assert c.id in s.wants_what
    await c.close()

    start = time()
    while c.id in s.wants_what or s.tasks:
        await asyncio.sleep(0.01)
        assert time() < start + 5


def test_threadsafe(c):
    def f(_):
        d = deque(maxlen=50)
        for i in range(100):
            future = c.submit(inc, random.randint(0, 100))
            d.append(future)
            sleep(0.001)
        c.gather(list(d))
        total = c.submit(sum, list(d))
        return total.result()

    from concurrent.futures import ThreadPoolExecutor

    with ThreadPoolExecutor(20) as e:
        results = list(e.map(f, range(20)))
        assert results and all(results)
        del results


@pytest.mark.slow
def test_threadsafe_get(c):
    da = pytest.importorskip("dask.array")

    x = da.arange(100, chunks=(10,))

    def f(_):
        total = 0
        for i in range(20):
            total += (x + random.randint(0, 20)).sum().compute()
            sleep(0.001)
        return total

    from concurrent.futures import ThreadPoolExecutor

    with ThreadPoolExecutor(30) as e:
        results = list(e.map(f, range(30)))
        assert results and all(results)


@pytest.mark.slow
def test_threadsafe_compute(c):
    da = pytest.importorskip("dask.array")

    x = da.arange(100, chunks=(10,))

    def f(_):
        total = 0
        for i in range(20):
            future = c.compute((x + random.randint(0, 20)).sum())
            total += future.result()
            sleep(0.001)
        return total

    from concurrent.futures import ThreadPoolExecutor

    e = ThreadPoolExecutor(30)
    results = list(e.map(f, range(30)))
    assert results and all(results)


@gen_cluster(client=True)
async def test_identity(c, s, a, b):
    assert c.id.lower().startswith("client")
    assert a.id.lower().startswith("worker")
    assert b.id.lower().startswith("worker")
    assert s.id.lower().startswith("scheduler")


@gen_cluster(client=True, nthreads=[("127.0.0.1", 4)] * 2)
async def test_get_client(c, s, a, b):
    assert get_client() is c
    assert c.asynchronous

    def f(x):
        client = get_client()
        future = client.submit(inc, x)
        import distributed

        assert not client.asynchronous
        assert client is distributed.tmp_client
        return future.result()

    import distributed

    distributed.tmp_client = c
    try:
        futures = c.map(f, range(5))
        results = await c.gather(futures)
        assert results == list(map(inc, range(5)))
    finally:
        del distributed.tmp_client


def test_get_client_no_cluster():
    # Clean up any global workers added by other tests. This test requires that
    # there are no global workers.
    Worker._instances.clear()

    msg = "No global client found and no address provided"
    with pytest.raises(ValueError, match=r"^{}$".format(msg)):
        get_client()
@gen_cluster(client=True)
async def test_serialize_collections(c, s, a, b):
    da = pytest.importorskip("dask.array")
    x = da.arange(10, chunks=(5,)).persist()

    def f(x):
        assert isinstance(x, da.Array)
        return x.sum().compute()

    future = c.submit(f, x)
    result = await future
    assert result == sum(range(10))


@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 1, timeout=100)
async def test_secede_simple(c, s, a):
    def f():
        client = get_client()
        secede()
        return client.submit(inc, 1).result()

    result = await c.submit(f)
    assert result == 2


@pytest.mark.slow
@gen_cluster(client=True, nthreads=[("127.0.0.1", 1)] * 2, timeout=60)
async def test_secede_balances(c, s, a, b):
    count = threading.active_count()

    def f(x):
        client = get_client()
        sleep(0.01)  # do some work
        secede()
        futures = client.map(slowinc, range(10), pure=False, delay=0.01)
        total = client.submit(sum, futures).result()
        return total

    futures = c.map(f, range(100))

    start = time()
    while not all(f.status == "finished" for f in futures):
        await asyncio.sleep(0.01)
        assert threading.active_count() < count + 50

    assert len(a.log) < 2 * len(b.log)
    assert len(b.log) < 2 * len(a.log)

    results = await c.gather(futures)
    assert results == [sum(map(inc, range(10)))] * 100


@gen_cluster(client=True)
async def test_sub_submit_priority(c, s, a, b):
    def f():
        client = get_client()
        client.submit(slowinc, 1, delay=0.2, key="slowinc")

    future = c.submit(f, key="f")
    await asyncio.sleep(0.1)
    if len(s.tasks) == 2:
        assert (
            s.priorities["f"] > s.priorities["slowinc"]
        )  # lower values schedule first


def test_get_client_sync(c, s, a, b):
    results = c.run(lambda: get_worker().scheduler.address)
    assert results == {w["address"]: s["address"] for w in [a, b]}

    results = c.run(lambda: get_client().scheduler.address)
    assert results == {w["address"]: s["address"] for w in [a, b]}


@gen_cluster(client=True)
async def test_serialize_collections_of_futures(c, s, a, b):
    pd = pytest.importorskip("pandas")
    dd = pytest.importorskip("dask.dataframe")
    from dask.dataframe.utils import assert_eq

    df = pd.DataFrame({"x": [1, 2, 3]})
    ddf = dd.from_pandas(df, npartitions=2).persist()
    future = await c.scatter(ddf)

    ddf2 = await future
    df2 = await c.compute(ddf2)

    assert_eq(df, df2)


def test_serialize_collections_of_futures_sync(c):
    pd = pytest.importorskip("pandas")
    dd = pytest.importorskip("dask.dataframe")
    from dask.dataframe.utils import assert_eq

    df = pd.DataFrame({"x": [1, 2, 3]})
    ddf = dd.from_pandas(df, npartitions=2).persist()
    future = c.scatter(ddf)

    result = future.result()
    assert_eq(result.compute(), df)

    assert future.type == dd.DataFrame
    assert c.submit(lambda x, y: assert_eq(x.compute(), y), future, df).result()


def _dynamic_workload(x, delay=0.01):
    if delay == "random":
        sleep(random.random() / 2)
    else:
        sleep(delay)
    if x > 4:
        return 4
    secede()
    client = get_client()
    futures = client.map(
        _dynamic_workload, [x + i + 1 for i in range(2)], pure=False, delay=delay
    )
    total = client.submit(sum, futures)
    return total.result()


def _test_dynamic_workloads_sync(c, delay):
    future = c.submit(_dynamic_workload, 0, delay=delay)
    assert future.result(timeout=40) == 52


def test_dynamic_workloads_sync(c):
    _test_dynamic_workloads_sync(c, delay=0.02)


@pytest.mark.slow
def test_dynamic_workloads_sync_random(c):
    _test_dynamic_workloads_sync(c, delay="random")


@gen_cluster(client=True)
async def test_bytes_keys(c, s, a, b):
    key = b"inc-123"
    future = c.submit(inc, 1, key=key)
    result = await future
    assert type(future.key) is bytes
    assert set(s.tasks) == {key}
    assert key in a.data or key in b.data
    assert result == 2
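
# A minimal sketch (not a test) of launching tasks from within a task:
# ``secede`` frees the worker thread while ``get_client`` submits more work,
# the pattern exercised by the secede/dynamic-workload tests above.  The
# address is a hypothetical placeholder.
def _example_task_launching_tasks():
    from distributed import Client, get_client, secede

    def fan_out(n):
        client = get_client()  # client connected to the same scheduler
        secede()  # leave the worker's thread pool while we wait
        futures = client.map(lambda x: x + 1, range(n))
        return client.submit(sum, futures).result()

    client = Client("tcp://127.0.0.1:8786")  # hypothetical address
    return client.submit(fan_out, 10).result()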
@gen_cluster(client=True)
async def test_unicode_ascii_keys(c, s, a, b):
    uni_type = type("")
    key = "inc-123"
    future = c.submit(inc, 1, key=key)
    result = await future
    assert type(future.key) is uni_type
    assert set(s.tasks) == {key}
    assert key in a.data or key in b.data
    assert result == 2


@gen_cluster(client=True)
async def test_unicode_keys(c, s, a, b):
    uni_type = type("")
    key = "inc-123\u03bc"
    future = c.submit(inc, 1, key=key)
    result = await future
    assert type(future.key) is uni_type
    assert set(s.tasks) == {key}
    assert key in a.data or key in b.data
    assert result == 2

    future2 = c.submit(inc, future)
    result2 = await future2
    assert result2 == 3

    future3 = await c.scatter({"data-123": 123})
    result3 = await future3["data-123"]
    assert result3 == 123


def test_use_synchronous_client_in_async_context(loop, c):
    async def f():
        x = await c.scatter(123)
        y = c.submit(inc, x)
        z = await c.gather(y)
        return z

    z = sync(loop, f)
    assert z == 124


def test_quiet_quit_when_cluster_leaves(loop_in_thread):
    loop = loop_in_thread
    with LocalCluster(
        loop=loop, scheduler_port=0, dashboard_address=None, silence_logs=False
    ) as cluster:
        with captured_logger("distributed.comm") as sio:
            with Client(cluster, loop=loop) as client:
                futures = client.map(lambda x: x + 1, range(10))
                sleep(0.05)
                cluster.close()
                sleep(0.05)

        text = sio.getvalue()
        assert not text


def test_warn_executor(loop, s, a, b):
    with warnings.catch_warnings(record=True) as record:
        with Executor(s["address"], loop=loop) as c:
            pass

    assert any("Client" in str(r.message) for r in record)


@gen_cluster([("127.0.0.1", 4)] * 2, client=True)
async def test_call_stack_future(c, s, a, b):
    x = c.submit(slowdec, 1, delay=0.5)
    future = c.submit(slowinc, 1, delay=0.5)
    await asyncio.sleep(0.1)
    results = await asyncio.gather(
        c.call_stack(future), c.call_stack(keys=[future.key])
    )
    assert all(list(first(result.values())) == [future.key] for result in results)
    assert results[0] == results[1]

    result = results[0]
    w = a if future.key in a.executing else b
    assert list(result) == [w.address]
    assert list(result[w.address]) == [future.key]
    assert "slowinc" in str(result)
    assert "slowdec" not in str(result)


@gen_cluster([("127.0.0.1", 4)] * 2, client=True)
async def test_call_stack_all(c, s, a, b):
    future = c.submit(slowinc, 1, delay=0.8)
    while not a.executing and not b.executing:
        await asyncio.sleep(0.01)
    result = await c.call_stack()
    w = a if a.executing else b
    assert list(result) == [w.address]
    assert list(result[w.address]) == [future.key]
    assert "slowinc" in str(result)


@gen_cluster([("127.0.0.1", 4)] * 2, client=True)
async def test_call_stack_collections(c, s, a, b):
    da = pytest.importorskip("dask.array")
    x = da.random.random(100, chunks=(10,)).map_blocks(slowinc, delay=0.5).persist()
    while not a.executing and not b.executing:
        await asyncio.sleep(0.001)
    result = await c.call_stack(x)
    assert result


@gen_cluster([("127.0.0.1", 4)] * 2, client=True)
async def test_call_stack_collections_all(c, s, a, b):
    da = pytest.importorskip("dask.array")
    x = da.random.random(100, chunks=(10,)).map_blocks(slowinc, delay=0.5).persist()
    while not a.executing and not b.executing:
        await asyncio.sleep(0.001)
    result = await c.call_stack()
    assert result


@gen_cluster(client=True, worker_kwargs={"profile_cycle_interval": "100ms"})
async def test_profile(c, s, a, b):
    futures = c.map(slowinc, range(10), delay=0.05, workers=a.address)
    await wait(futures)

    x = await c.profile(start=time() + 10, stop=time() + 20)
    assert not x["count"]

    x = await c.profile(start=0, stop=time())
    assert (
        x["count"]
        == sum(p["count"] for _, p in a.profile_history) + a.profile_recent["count"]
    )

    y = await c.profile(start=time() - 0.300, stop=time())
    assert 0 < y["count"] < x["count"]

    assert not any(p["count"] for _, p in b.profile_history)
    result = await c.profile(workers=b.address)
    assert not result["count"]
await c.profile(start=time() + 10, stop=time() + 20) assert not x["count"] x = await c.profile(start=0, stop=time()) assert ( x["count"] == sum(p["count"] for _, p in a.profile_history) + a.profile_recent["count"] ) y = await c.profile(start=time() - 0.300, stop=time()) assert 0 < y["count"] < x["count"] assert not any(p["count"] for _, p in b.profile_history) result = await c.profile(workers=b.address) assert not result["count"] @gen_cluster(client=True, worker_kwargs={"profile_cycle_interval": "100ms"}) async def test_profile_keys(c, s, a, b): x = c.map(slowinc, range(10), delay=0.05, workers=a.address) y = c.map(slowdec, range(10), delay=0.05, workers=a.address) await wait(x + y) xp = await c.profile("slowinc") yp = await c.profile("slowdec") p = await c.profile() assert p["count"] == xp["count"] + yp["count"] with captured_logger(logging.getLogger("distributed")) as logger: prof = await c.profile("does-not-exist") assert prof == profile.create() out = logger.getvalue() assert not out @gen_cluster() async def test_client_with_name(s, a, b): with captured_logger("distributed.scheduler") as sio: client = await Client(s.address, asynchronous=True, name="foo") assert "foo" in client.id await client.close() text = sio.getvalue() assert "foo" in text @gen_cluster(client=True) async def test_future_defaults_to_default_client(c, s, a, b): x = c.submit(inc, 1) await wait(x) future = Future(x.key) assert future.client is c @gen_cluster(client=True) async def test_future_auto_inform(c, s, a, b): x = c.submit(inc, 1) await wait(x) client = await Client(s.address, asynchronous=True) future = Future(x.key, client) start = time() while future.status != "finished": await asyncio.sleep(0.01) assert time() < start + 1 await client.close() def test_client_async_before_loop_starts(): with pristine_loop() as loop: client = Client(asynchronous=True, loop=loop) assert client.asynchronous client.close() @pytest.mark.slow @gen_cluster(client=True, Worker=Nanny, timeout=60, nthreads=[("127.0.0.1", 3)] * 2) async def test_nested_compute(c, s, a, b): def fib(x): assert get_worker().get_current_task() if x < 2: return x a = delayed(fib)(x - 1) b = delayed(fib)(x - 2) c = a + b return c.compute() future = c.submit(fib, 8) result = await future assert result == 21 assert len(s.transition_log) > 50 @gen_cluster(client=True) async def test_task_metadata(c, s, a, b): await c.set_metadata("x", 1) result = await c.get_metadata("x") assert result == 1 future = c.submit(inc, 1) key = future.key await wait(future) await c.set_metadata(key, 123) result = await c.get_metadata(key) assert result == 123 del future while key in s.tasks: await asyncio.sleep(0.01) with pytest.raises(KeyError): await c.get_metadata(key) result = await c.get_metadata(key, None) assert result is None await c.set_metadata(["x", "a"], 1) result = await c.get_metadata("x") assert result == {"a": 1} await c.set_metadata(["x", "b"], 2) result = await c.get_metadata("x") assert result == {"a": 1, "b": 2} result = await c.get_metadata(["x", "a"]) assert result == 1 await c.set_metadata(["x", "a", "c", "d"], 1) result = await c.get_metadata("x") assert result == {"a": {"c": {"d": 1}}, "b": 2} @gen_cluster(client=True, Worker=Nanny) async def test_logs(c, s, a, b): await wait(c.map(inc, range(5))) logs = await c.get_scheduler_logs(n=5) assert logs for _, msg in logs: assert "distributed.scheduler" in msg w_logs = await c.get_worker_logs(n=5) assert set(w_logs.keys()) == {a.worker_address, b.worker_address} for log in w_logs.values(): for _, msg in log: assert 
"distributed.worker" in msg n_logs = await c.get_worker_logs(nanny=True) assert set(n_logs.keys()) == {a.worker_address, b.worker_address} for log in n_logs.values(): for _, msg in log: assert "distributed.nanny" in msg n_logs = await c.get_worker_logs(nanny=True, workers=[a.worker_address]) assert set(n_logs.keys()) == {a.worker_address} for log in n_logs.values(): for _, msg in log: assert "distributed.nanny" in msg @gen_cluster(client=True) async def test_avoid_delayed_finalize(c, s, a, b): x = delayed(inc)(1) future = c.compute(x) result = await future assert result == 2 assert list(s.tasks) == [future.key] == [x.key] @gen_cluster() async def test_config_scheduler_address(s, a, b): with dask.config.set({"scheduler-address": s.address}): with captured_logger("distributed.client") as sio: c = await Client(asynchronous=True) assert c.scheduler.address == s.address text = sio.getvalue() assert s.address in text await c.close() @gen_cluster(client=True) async def test_warn_when_submitting_large_values(c, s, a, b): with warnings.catch_warnings(record=True) as record: future = c.submit(lambda x: x + 1, b"0" * 2000000) text = str(record[0].message) assert "2.00 MB" in text assert "large" in text assert "..." in text assert "'000" in text assert "000'" in text assert len(text) < 2000 with warnings.catch_warnings(record=True) as record: data = b"0" * 2000000 for i in range(10): future = c.submit(lambda x, y: x, data, i) assert len(record) < 2 @gen_cluster() async def test_scatter_direct(s, a, b): c = await Client(s.address, asynchronous=True, heartbeat_interval=10) last = s.clients[c.id].last_seen start = time() while s.clients[c.id].last_seen == last: await asyncio.sleep(0.10) assert time() < start + 5 await c.close() @gen_cluster(client=True) async def test_unhashable_function(c, s, a, b): d = {"a": 1} result = await c.submit(d.get, "a") assert result == 1 @gen_cluster() async def test_client_name(s, a, b): with dask.config.set({"client-name": "hello-world"}): c = await Client(s.address, asynchronous=True) assert any("hello-world" in name for name in list(s.clients)) await c.close() def test_client_doesnt_close_given_loop(loop, s, a, b): with Client(s["address"], loop=loop) as c: assert c.submit(inc, 1).result() == 2 with Client(s["address"], loop=loop) as c: assert c.submit(inc, 2).result() == 3 @gen_cluster(client=True, nthreads=[]) async def test_quiet_scheduler_loss(c, s): c._periodic_callbacks["scheduler-info"].interval = 10 with captured_logger(logging.getLogger("distributed.client")) as logger: await s.close() await c._update_scheduler_info() text = logger.getvalue() assert "BrokenPipeError" not in text def test_dashboard_link(loop, monkeypatch): monkeypatch.setenv("USER", "myusername") with cluster(scheduler_kwargs={"dashboard_address": ":12355"}) as (s, [a, b]): with Client(s["address"], loop=loop) as c: with dask.config.set( {"distributed.dashboard.link": "{scheme}://foo-{USER}:{port}/status"} ): link = "http://foo-myusername:12355/status" assert link == c.dashboard_link text = c._repr_html_() assert link in text @pytest.mark.asyncio async def test_dashboard_link_inproc(cleanup): async with Client(processes=False, asynchronous=True) as c: with dask.config.set({"distributed.dashboard.link": "{host}"}): assert "/" not in c.dashboard_link @gen_test() async def test_client_timeout_2(): with dask.config.set({"distributed.comm.timeouts.connect": "10ms"}): start = time() c = Client("127.0.0.1:3755", asynchronous=True) with pytest.raises((TimeoutError, IOError)): await c stop = time() assert 
c.status == "closed" await c.close() assert stop - start < 1 @gen_test() async def test_client_active_bad_port(): import tornado.web import tornado.httpserver application = tornado.web.Application([(r"/", tornado.web.RequestHandler)]) http_server = tornado.httpserver.HTTPServer(application) http_server.listen(8080) with dask.config.set({"distributed.comm.timeouts.connect": "10ms"}): c = Client("127.0.0.1:8080", asynchronous=True) with pytest.raises((TimeoutError, IOError)): await c await c._close(fast=True) http_server.stop() @pytest.mark.parametrize("direct", [True, False]) def test_turn_off_pickle(direct): @gen_cluster() async def test(s, a, b): import numpy as np async with Client( s.address, asynchronous=True, serializers=["dask", "msgpack"] ) as c: assert (await c.submit(inc, 1)) == 2 await c.submit(np.ones, 5) await c.scatter(1) # Can't send complex data with pytest.raises(TypeError): future = await c.scatter(inc) # can send complex tasks (this uses pickle regardless) future = c.submit(lambda x: x, inc) await wait(future) # but can't receive complex results with pytest.raises(TypeError): await c.gather(future, direct=direct) # Run works result = await c.run(lambda: 1) assert list(result.values()) == [1, 1] result = await c.run_on_scheduler(lambda: 1) assert result == 1 # But not with complex return values with pytest.raises(TypeError): await c.run(lambda: inc) with pytest.raises(TypeError): await c.run_on_scheduler(lambda: inc) test() @gen_cluster() async def test_de_serialization(s, a, b): import numpy as np c = await Client( s.address, asynchronous=True, serializers=["msgpack", "pickle"], deserializers=["msgpack"], ) try: # Can send complex data future = await c.scatter(np.ones(5)) # But can not retrieve it with pytest.raises(TypeError): result = await future finally: await c.close() @gen_cluster() async def test_de_serialization_none(s, a, b): import numpy as np c = await Client(s.address, asynchronous=True, deserializers=["msgpack"]) try: # Can send complex data future = await c.scatter(np.ones(5)) # But can not retrieve it with pytest.raises(TypeError): result = await future finally: await c.close() @gen_cluster() async def test_client_repr_closed(s, a, b): c = await Client(s.address, asynchronous=True) await c.close() c._repr_html_() def test_client_repr_closed_sync(loop): with Client(loop=loop, processes=False, dashboard_address=None) as c: c.close() c._repr_html_() @gen_cluster(client=True, nthreads=[("127.0.0.1", 1)]) async def test_nested_prioritization(c, s, w): x = delayed(inc)(1, dask_key_name=("a", 2)) y = delayed(inc)(2, dask_key_name=("a", 10)) o = dask.order.order(merge(x.__dask_graph__(), y.__dask_graph__())) fx, fy = c.compute([x, y]) await wait([fx, fy]) assert (o[x.key] < o[y.key]) == ( s.tasks[tokey(fx.key)].priority < s.tasks[tokey(fy.key)].priority ) @gen_cluster(client=True) async def test_scatter_error_cancel(c, s, a, b): # https://github.com/dask/distributed/issues/2038 def bad_fn(x): raise Exception("lol") x = await c.scatter(1) y = c.submit(bad_fn, x) del x await wait(y) assert y.status == "error" await asyncio.sleep(0.1) assert y.status == "error" # not cancelled def test_no_threads_lingering(): active = dict(threading._active) assert threading.active_count() < 40, list(active.values()) @gen_cluster() async def test_direct_async(s, a, b): c = await Client(s.address, asynchronous=True, direct_to_workers=True) assert c.direct_to_workers await c.close() c = await Client(s.address, asynchronous=True, direct_to_workers=False) assert not c.direct_to_workers 
await c.close() def test_direct_sync(c): assert not c.direct_to_workers def f(): return get_client().direct_to_workers assert c.submit(f).result() @gen_cluster() async def test_mixing_clients(s, a, b): c1 = await Client(s.address, asynchronous=True) c2 = await Client(s.address, asynchronous=True) future = c1.submit(inc, 1) with pytest.raises(ValueError): c2.submit(inc, future) assert not c2.futures # Don't create Futures on second Client await c1.close() await c2.close() @gen_cluster(client=True) async def test_tuple_keys(c, s, a, b): x = dask.delayed(inc)(1, dask_key_name=("x", 1)) y = dask.delayed(inc)(x, dask_key_name=("y", 1)) future = c.compute(y) assert (await future) == 3 @gen_cluster(client=True) async def test_multiple_scatter(c, s, a, b): futures = await asyncio.gather(*[c.scatter(1, direct=True) for _ in range(5)]) x = await futures[0] x = await futures[0] @gen_cluster(client=True) async def test_map_large_kwargs_in_graph(c, s, a, b): np = pytest.importorskip("numpy") x = np.random.random(100000) futures = c.map(lambda a, b: a + b, range(100), b=x) while not s.tasks: await asyncio.sleep(0.01) assert len(s.tasks) == 101 assert any(k.startswith("ndarray") for k in s.tasks) @gen_cluster(client=True) async def test_retry(c, s, a, b): def f(): assert dask.config.get("foo") with dask.config.set(foo=False): future = c.submit(f) with pytest.raises(AssertionError): await future with dask.config.set(foo=True): await future.retry() await future @gen_cluster(client=True) async def test_retry_dependencies(c, s, a, b): def f(): return dask.config.get("foo") x = c.submit(f) y = c.submit(inc, x) with pytest.raises(KeyError): await y with dask.config.set(foo=100): await y.retry() result = await y assert result == 101 await y.retry() await x.retry() result = await y assert result == 101 @gen_cluster(client=True) async def test_released_dependencies(c, s, a, b): def f(x): return dask.config.get("foo") + 1 x = c.submit(inc, 1, key="x") y = c.submit(f, x, key="y") del x with pytest.raises(KeyError): await y with dask.config.set(foo=100): await y.retry() result = await y assert result == 101 @gen_cluster(client=True, clean_kwargs={"threads": False}) async def test_profile_bokeh(c, s, a, b): pytest.importorskip("bokeh.plotting") from bokeh.model import Model await c.gather(c.map(slowinc, range(10), delay=0.2)) state, figure = await c.profile(plot=True) assert isinstance(figure, Model) with tmpfile("html") as fn: try: await c.profile(filename=fn) except PermissionError: if WINDOWS: pytest.xfail() assert os.path.exists(fn) @gen_cluster(client=True) async def test_get_mix_futures_and_SubgraphCallable(c, s, a, b): future = c.submit(add, 1, 2) subgraph = SubgraphCallable( {"_2": (add, "_0", "_1"), "_3": (add, future, "_2")}, "_3", ("_0", "_1") ) dsk = {"a": 1, "b": 2, "c": (subgraph, "a", "b"), "d": (subgraph, "c", "b")} future2 = c.get(dsk, "d", sync=False) result = await future2 assert result == 11 # Nested subgraphs subgraph2 = SubgraphCallable( { "_2": (subgraph, "_0", "_1"), "_3": (subgraph, "_2", "_1"), "_4": (add, "_3", future2), }, "_4", ("_0", "_1"), ) dsk2 = {"e": 1, "f": 2, "g": (subgraph2, "e", "f")} result = await c.get(dsk2, "g", sync=False) assert result == 22 @gen_cluster(client=True) async def test_get_mix_futures_and_SubgraphCallable_dask_dataframe(c, s, a, b): dd = pytest.importorskip("dask.dataframe") import pandas as pd df = pd.DataFrame({"x": range(1, 11)}) ddf = dd.from_pandas(df, npartitions=2).persist() ddf = ddf.map_partitions(lambda x: x) ddf["x"] = ddf["x"].astype("f8") ddf = 
ddf.map_partitions(lambda x: x) ddf["x"] = ddf["x"].astype("f8") result = await c.compute(ddf) assert result.equals(df.astype("f8")) def test_direct_to_workers(s, loop): with Client(s["address"], loop=loop, direct_to_workers=True) as client: future = client.scatter(1) future.result() resp = client.run_on_scheduler(lambda dask_scheduler: dask_scheduler.events) assert "gather" not in str(resp) @gen_cluster(client=True) async def test_instances(c, s, a, b): assert list(Client._instances) == [c] assert list(Scheduler._instances) == [s] assert set(Worker._instances) == {a, b} @gen_cluster(client=True) async def test_wait_for_workers(c, s, a, b): future = asyncio.ensure_future(c.wait_for_workers(n_workers=3)) await asyncio.sleep(0.22) # 2 chances assert not future.done() w = await Worker(s.address) start = time() await future assert time() < start + 1 await w.close() with pytest.raises(TimeoutError) as info: await c.wait_for_workers(n_workers=10, timeout="1 ms") assert "2/10" in str(info.value).replace(" ", "") assert "1 ms" in str(info.value) @pytest.mark.skipif(WINDOWS, reason="num_fds not supported on windows") @pytest.mark.asyncio @pytest.mark.parametrize("Worker", [Worker, Nanny]) async def test_file_descriptors_dont_leak(Worker): pytest.importorskip("pandas") df = dask.datasets.timeseries(freq="10s", dtypes={"x": int, "y": float}) proc = psutil.Process() start = proc.num_fds() async with Scheduler(port=0, dashboard_address=":0") as s: async with Worker(s.address, nthreads=2) as a, Worker( s.address, nthreads=2 ) as b: async with Client(s.address, asynchronous=True) as c: await df.sum().persist() begin = time() while proc.num_fds() > begin: await asyncio.sleep(0.01) assert time() < begin + 5, (start, proc.num_fds()) @pytest.mark.asyncio async def test_dashboard_link_cluster(cleanup): class MyCluster(LocalCluster): @property def dashboard_link(self): return "http://foo.com" async with MyCluster(processes=False, asynchronous=True) as cluster: async with Client(cluster, asynchronous=True) as client: assert "http://foo.com" in client._repr_html_() @pytest.mark.asyncio async def test_shutdown(cleanup): async with Scheduler(port=0) as s: async with Worker(s.address) as w: async with Client(s.address, asynchronous=True) as c: await c.shutdown() assert s.status == Status.closed assert w.status == Status.closed @pytest.mark.asyncio async def test_shutdown_localcluster(cleanup): async with LocalCluster(n_workers=1, asynchronous=True, processes=False) as lc: async with Client(lc, asynchronous=True) as c: await c.shutdown() assert lc.scheduler.status == Status.closed @pytest.mark.asyncio async def test_config_inherited_by_subprocess(cleanup): def f(x): return dask.config.get("foo") + 1 with dask.config.set(foo=100): async with LocalCluster(n_workers=1, asynchronous=True, processes=True) as lc: async with Client(lc, asynchronous=True) as c: result = await c.submit(f, 1) assert result == 101 @gen_cluster(client=True) async def test_futures_of_sorted(c, s, a, b): pytest.importorskip("dask.dataframe") df = await dask.datasets.timeseries(dtypes={"x": int}).persist() futures = futures_of(df) for k, f in zip(df.__dask_keys__(), futures): assert str(k) in str(f) @gen_cluster(client=True, worker_kwargs={"profile_cycle_interval": "10ms"}) async def test_profile_server(c, s, a, b): for i in range(5): try: x = c.map(slowinc, range(10), delay=0.01, workers=a.address, pure=False) await wait(x) await asyncio.gather( c.run(slowinc, 1, delay=0.5), c.run_on_scheduler(slowdec, 1, delay=0.5) ) p = await 
c.profile(server=True) # All worker servers assert "slowinc" in str(p) p = await c.profile(scheduler=True) # Scheduler assert "slowdec" in str(p) except AssertionError: if i == 4: raise else: pass else: break @gen_cluster(client=True) async def test_await_future(c, s, a, b): future = c.submit(inc, 1) async def f(): # flake8: noqa result = await future assert result == 2 await f() future = c.submit(div, 1, 0) async def f(): with pytest.raises(ZeroDivisionError): await future await f() @gen_cluster(client=True) async def test_as_completed_async_for(c, s, a, b): futures = c.map(inc, range(10)) ac = as_completed(futures) results = [] async def f(): async for future in ac: result = await future results.append(result) await f() assert set(results) == set(range(1, 11)) @gen_cluster(client=True) async def test_as_completed_async_for_results(c, s, a, b): futures = c.map(inc, range(10)) ac = as_completed(futures, with_results=True) results = [] async def f(): async for future, result in ac: results.append(result) await f() assert set(results) == set(range(1, 11)) assert not s.counters["op"].components[0]["gather"] @gen_cluster(client=True) async def test_as_completed_async_for_cancel(c, s, a, b): x = c.submit(inc, 1) y = c.submit(sleep, 0.3) ac = as_completed([x, y]) async def _(): await asyncio.sleep(0.1) await y.cancel(asynchronous=True) c.loop.add_callback(_) L = [] async def f(): async for future in ac: L.append(future) await f() assert L == [x, y] def test_async_with(loop): result = None client = None cluster = None async def f(): async with Client(processes=False, asynchronous=True) as c: nonlocal result, client, cluster result = await c.submit(lambda x: x + 1, 10) client = c cluster = c.cluster loop.run_sync(f) assert result == 11 assert client.status == "closed" assert cluster.status == Status.closed def test_client_sync_with_async_def(loop): async def ff(): await asyncio.sleep(0.01) return 1 with cluster() as (s, [a, b]): with Client(s["address"], loop=loop) as c: assert sync(loop, ff) == 1 assert c.sync(ff) == 1 @pytest.mark.skip(reason="known intermittent failure") @gen_cluster(client=True) async def test_dont_hold_on_to_large_messages(c, s, a, b): np = pytest.importorskip("numpy") da = pytest.importorskip("dask.array") x = np.random.random(1000000) xr = weakref.ref(x) d = da.from_array(x, chunks=(100000,)) d = d.persist() del x start = time() while xr() is not None: if time() > start + 5: # Help diagnosing from types import FrameType x = xr() if x is not None: del x rc = sys.getrefcount(xr()) refs = gc.get_referrers(xr()) print("refs to x:", rc, refs, gc.isenabled()) frames = [r for r in refs if isinstance(r, FrameType)] for i, f in enumerate(frames): print( "frames #%d:" % i, f.f_code.co_name, f.f_code.co_filename, sorted(f.f_locals), ) pytest.fail("array should have been destroyed") await asyncio.sleep(0.200) @gen_cluster(client=True) async def test_run_scheduler_async_def(c, s, a, b): async def f(dask_scheduler): await asyncio.sleep(0.01) dask_scheduler.foo = "bar" await c.run_on_scheduler(f) assert s.foo == "bar" async def f(dask_worker): await asyncio.sleep(0.01) dask_worker.foo = "bar" await c.run(f) assert a.foo == "bar" assert b.foo == "bar" @gen_cluster(client=True) async def test_run_scheduler_async_def_wait(c, s, a, b): async def f(dask_scheduler): await asyncio.sleep(0.01) dask_scheduler.foo = "bar" await c.run_on_scheduler(f, wait=False) while not hasattr(s, "foo"): await asyncio.sleep(0.01) assert s.foo == "bar" async def f(dask_worker): await asyncio.sleep(0.01) 
dask_worker.foo = "bar" await c.run(f, wait=False) while not hasattr(a, "foo") or not hasattr(b, "foo"): await asyncio.sleep(0.01) assert a.foo == "bar" assert b.foo == "bar" @gen_cluster(client=True, nthreads=[("127.0.0.1", 2)] * 2) async def test_performance_report(c, s, a, b): da = pytest.importorskip("dask.array") async def f(): """ We wrap this in a function so that the assertions aren't in the performance report itself Also, we want this comment to appear """ x = da.random.random((1000, 1000), chunks=(100, 100)) with tmpfile(extension="html") as fn: async with performance_report(filename=fn): await c.compute((x + x.T).sum()) with open(fn) as f: data = f.read() return data data = await f() assert "Also, we want this comment to appear" in data assert "bokeh" in data assert "random" in data assert "Dask Performance Report" in data assert "x = da.random" in data assert "Threads: 4" in data @pytest.mark.asyncio async def test_client_gather_semaphor_loop(cleanup): async with Scheduler(port=0) as s: async with Client(s.address, asynchronous=True) as c: assert c._gather_semaphore._loop is c.loop.asyncio_loop @gen_cluster(client=True) async def test_as_completed_condition_loop(c, s, a, b): seq = c.map(inc, range(5)) ac = as_completed(seq) assert ac.condition._loop == c.loop.asyncio_loop def test_client_connectionpool_semaphore_loop(s, a, b): with Client(s["address"]) as c: assert c.rpc.semaphore._loop is c.loop.asyncio_loop @pytest.mark.slow @pytest.mark.asyncio async def test_mixed_compression(cleanup): pytest.importorskip("lz4") da = pytest.importorskip("dask.array") async with Scheduler(port=0, dashboard_address=":0") as s: async with Nanny( s.address, nthreads=1, config={"distributed.comm.compression": None} ) as a: async with Nanny( s.address, nthreads=1, config={"distributed.comm.compression": "lz4"} ) as b: async with Client(s.address, asynchronous=True) as c: await c.get_versions() x = da.ones((10000, 10000)) y = x + x.T await c.compute(y.sum())
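# --- A minimal, self-contained sketch of the performance_report usage that
# test_performance_report above exercises; it is an illustration, not part of
# the test suite. Assumes dask.array and distributed are installed locally;
# the filename "dask-report.html" is arbitrary.
import dask.array as da
from distributed import Client, performance_report

if __name__ == "__main__":
    client = Client(processes=False)  # small in-process cluster for the sketch
    x = da.random.random((1000, 1000), chunks=(100, 100))
    with performance_report(filename="dask-report.html"):
        (x + x.T).sum().compute()  # work inside the block is recorded in the report
    client.close()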
ssh.py
from __future__ import absolute_import from __future__ import division import inspect import logging import os import re import shutil import six import string import sys import tarfile import tempfile import threading import time import types from pwnlib import term from pwnlib.context import context, LocalContext from pwnlib.log import Logger from pwnlib.log import getLogger from pwnlib.term import text from pwnlib.timeout import Timeout from pwnlib.tubes.sock import sock from pwnlib.util import hashes from pwnlib.util import misc from pwnlib.util import safeeval from pwnlib.util.sh_string import sh_string # Kill the warning line: # No handlers could be found for logger "paramiko.transport" paramiko_log = logging.getLogger("paramiko.transport") h = logging.StreamHandler(open(os.devnull,'w+')) h.setFormatter(logging.Formatter()) paramiko_log.addHandler(h) class ssh_channel(sock): #: Parent :class:`ssh` object parent = None #: Remote host host = None #: Return code, or :const:`None` if the process has not returned #: Use :meth:`poll` to check. returncode = None #: :const:`True` if a tty was allocated for this channel tty = False #: Environment specified for the remote process, or :const:`None` #: if the default environment was used env = None #: Command specified for the constructor process = None def __init__(self, parent, process = None, tty = False, wd = None, env = None, raw = True, *args, **kwargs): super(ssh_channel, self).__init__(*args, **kwargs) # keep the parent from being garbage collected in some cases self.parent = parent self.returncode = None self.host = parent.host self.tty = tty self.env = env self.process = process self.cwd = wd or '.' if isinstance(wd, six.text_type): wd = context._encode(wd) env = env or {} msg = 'Opening new channel: %r' % (process or 'shell') if isinstance(process, (list, tuple)): process = b' '.join(context._encode(sh_string(s)) for s in process) if isinstance(process, six.text_type): process = context._encode(process) if process and wd: process = b'cd ' + sh_string(wd) + b' >/dev/null 2>&1; ' + process if process and env: for name, value in env.items(): nameb = context._encode(name) if not re.match(b'^[a-zA-Z_][a-zA-Z0-9_]*$', nameb): self.error('run(): Invalid environment key %r' % name) export = b'export %s=%s;' % (nameb, sh_string(context._encode(value))) process = export + process if process and tty: if raw: process = b'stty raw -ctlecho -echo; ' + process else: process = b'stty -ctlecho -echo; ' + process # If this object is enabled for DEBUG-level logging, don't hide # anything about the command that's actually executed. if process and self.isEnabledFor(logging.DEBUG): msg = 'Opening new channel: %r' % (process or 'shell') with self.waitfor(msg) as h: import paramiko try: self.sock = parent.transport.open_session() except paramiko.ChannelException as e: if e.args == (1, 'Administratively prohibited'): self.error("Too many sessions open! Use ssh_channel.close() or 'with'!") raise e if self.tty: self.sock.get_pty('xterm', term.width, term.height) def resizer(): if self.sock: try: self.sock.resize_pty(term.width, term.height) except paramiko.ssh_exception.SSHException: pass self.resizer = resizer term.term.on_winch.append(self.resizer) else: self.resizer = None # Put stderr on stdout.
This might not always be desirable, # but our API does not support multiple streams self.sock.set_combine_stderr(True) self.settimeout(self.timeout) if process: self.sock.exec_command(process) else: self.sock.invoke_shell() h.success() def kill(self): """kill() Kills the process. """ self.close() def recvall(self, timeout = sock.forever): # We subclass tubes.sock which sets self.sock to None. # # However, we need to wait for the return value to propagate, # which may not happen by the time .close() is called by tube.recvall() tmp_sock = self.sock tmp_close = self.close self.close = lambda: None timeout = self.maximum if self.timeout is self.forever else self.timeout data = super(ssh_channel, self).recvall(timeout) # Restore self.sock to be able to call wait() self.close = tmp_close self.sock = tmp_sock self.wait() self.close() # Again set self.sock to None self.sock = None return data def wait(self, timeout=sock.default): # TODO: deal with timeouts return self.poll(block=True) def poll(self, block=False): """poll() -> int Poll the exit code of the process. Returns :const:`None` if the process has not yet finished, and the exit code otherwise. """ if self.returncode is None and self.sock \ and (block or self.sock.exit_status_ready()): while not self.sock.status_event.is_set(): self.sock.status_event.wait(0.05) self.returncode = self.sock.recv_exit_status() return self.returncode def can_recv_raw(self, timeout): with self.countdown(timeout): while self.countdown_active(): if self.sock.recv_ready(): return True time.sleep(min(self.timeout, 0.05)) return False def interactive(self, prompt = term.text.bold_red('$') + ' '): """interactive(prompt = pwnlib.term.text.bold_red('$') + ' ') If not in TTY-mode, this does exactly the same as :meth:`pwnlib.tubes.tube.tube.interactive`, otherwise it does mostly the same. An SSH connection in TTY-mode will typically supply its own prompt, thus the prompt argument is ignored in this case. We also have a few SSH-specific hacks that will ideally be removed once the :mod:`pwnlib.term` is more mature. """ # If we are only executing a regular old shell, we need to handle # control codes (specifically Ctrl+C). # # Otherwise, we can just punt to the default implementation of interactive() if self.process is not None: return super(ssh_channel, self).interactive(prompt) self.info('Switching to interactive mode') # We would like a cursor, please!
term.term.show_cursor() event = threading.Event() def recv_thread(event): while not event.is_set(): try: cur = self.recv(timeout = 0.05) cur = cur.replace(b'\r\n',b'\n') cur = cur.replace(b'\r',b'') if cur is None: continue elif cur == b'\a': # Ugly hack until term understands bell characters continue stdout = sys.stdout if not term.term_mode: stdout = getattr(stdout, 'buffer', stdout) stdout.write(cur) stdout.flush() except EOFError: self.info('Got EOF while reading in interactive') event.set() break t = context.Thread(target = recv_thread, args = (event,)) t.daemon = True t.start() while not event.is_set(): if term.term_mode: try: data = term.key.getraw(0.1) except KeyboardInterrupt: data = [3] # This is ctrl-c except IOError: if not event.is_set(): raise else: stdin = getattr(sys.stdin, 'buffer', sys.stdin) data = stdin.read(1) if not data: event.set() else: data = bytearray(data) if data: try: self.send(bytes(bytearray(data))) except EOFError: event.set() self.info('Got EOF while sending in interactive') while t.is_alive(): t.join(timeout = 0.1) # Restore term.term.hide_cursor() def close(self): self.poll() while self.resizer in term.term.on_winch: term.term.on_winch.remove(self.resizer) super(ssh_channel, self).close() def spawn_process(self, *args, **kwargs): self.error("Cannot use spawn_process on an SSH channel.") def _close_msg(self): self.info('Closed SSH channel with %s' % self.host) class ssh_process(ssh_channel): #: Working directory cwd = None #: PID of the process #: Only valid when instantiated through :meth:`ssh.process` pid = None #: Executable of the process #: Only valid when instantiated through :meth:`ssh.process` executable = None #: Arguments passed to the process #: Only valid when instantiated through :meth:`ssh.process` argv = None def libs(self): """libs() -> dict Returns a dictionary mapping the address of each loaded library in the process's address space. If ``/proc/$PID/maps`` cannot be opened, the output of ldd is used verbatim, which may be different than the actual addresses if ASLR is enabled. """ maps = self.parent.libs(self.executable) maps_raw = self.parent.cat('/proc/%d/maps' % self.pid).decode() for lib in maps: remote_path = lib.split(self.parent.host)[-1] for line in maps_raw.splitlines(): if line.endswith(remote_path): address = line.split('-')[0] maps[lib] = int(address, 16) break return maps @property def libc(self): """libc() -> ELF Returns an ELF for the libc for the current process. If possible, it is adjusted to the correct address automatically. Examples: >>> s = ssh(host='example.pwnme') >>> p = s.process('true') >>> p.libc # doctest: +ELLIPSIS ELF('.../libc.so.6') """ from pwnlib.elf import ELF for lib, address in self.libs().items(): if 'libc.so' in lib: e = ELF(lib) e.address = address return e @property def elf(self): """elf() -> pwnlib.elf.elf.ELF Returns an ELF file for the executable that launched the process. """ import pwnlib.elf.elf libs = self.parent.libs(self.executable) for lib in libs: # Cannot just check "executable in lib", see issue #1047 if lib.endswith(self.executable): return pwnlib.elf.elf.ELF(lib) @property def corefile(self): import pwnlib.elf.corefile finder = pwnlib.elf.corefile.CorefileFinder(self) if not finder.core_path: self.error("Could not find core file for pid %i" % self.pid) return pwnlib.elf.corefile.Corefile(finder.core_path) def getenv(self, variable, **kwargs): r"""Retrieve the address of an environment variable in the remote process.
Examples: >>> s = ssh(host='example.pwnme') >>> p = s.process(['python', '-c', 'import time; time.sleep(10)']) >>> hex(p.getenv('PATH')) # doctest: +ELLIPSIS '0x...' """ argv0 = self.argv[0] variable = context._encode(variable) script = ';'.join(('from ctypes import *', 'import os', 'libc = CDLL("libc.so.6")', 'getenv = libc.getenv', 'getenv.restype = c_void_p', 'print(os.path.realpath(%r))' % self.executable, 'print(getenv(%r))' % variable,)) try: with context.quiet: python = self.parent.which('python2.7') or self.parent.which('python') if not python: self.error("Python is not installed on the remote system.") io = self.parent.process([argv0,'-c', script.strip()], executable=python, env=self.env, **kwargs) path = io.recvline() address = int(io.recvall()) address -= len(python) address += len(path) return int(address) & context.mask except Exception: self.exception("Could not look up environment variable %r" % variable) def _close_msg(self): # If we never completely started up, just use the parent implementation if self.executable is None: return super(ssh_process, self)._close_msg() self.info('Stopped remote process %r on %s (pid %i)' \ % (os.path.basename(self.executable), self.host, self.pid)) class ssh_connecter(sock): def __init__(self, parent, host, port, *a, **kw): super(ssh_connecter, self).__init__(*a, **kw) # keep the parent from being garbage collected in some cases self.parent = parent self.host = parent.host self.rhost = host self.rport = port msg = 'Connecting to %s:%d via SSH to %s' % (self.rhost, self.rport, self.host) with self.waitfor(msg) as h: try: self.sock = parent.transport.open_channel('direct-tcpip', (host, port), ('127.0.0.1', 0)) except Exception as e: self.exception(str(e)) raise try: # Iterate all layers of proxying to get to base-level Socket object curr = self.sock.get_transport().sock while getattr(curr, "get_transport", None): curr = curr.get_transport().sock sockname = curr.getsockname() self.lhost = sockname[0] self.lport = sockname[1] except Exception as e: self.exception("Could not find base-level Socket object.") raise e h.success() def spawn_process(self, *args, **kwargs): self.error("Cannot use spawn_process on an SSH channel.") def _close_msg(self): self.info("Closed remote connection to %s:%d via SSH connection to %s" % (self.rhost, self.rport, self.host)) class ssh_listener(sock): def __init__(self, parent, bind_address, port, *a, **kw): super(ssh_listener, self).__init__(*a, **kw) # keep the parent from being garbage collected in some cases self.parent = parent self.host = parent.host try: self.port = parent.transport.request_port_forward(bind_address, port) except Exception: self.exception('Failed to create a port forwarding') raise def accepter(): msg = 'Waiting on port %d via SSH to %s' % (self.port, self.host) h = self.waitfor(msg) try: self.sock = parent.transport.accept() parent.transport.cancel_port_forward(bind_address, self.port) except Exception: self.sock = None h.failure() self.exception('Failed to get a connection') return self.rhost, self.rport = self.sock.origin_addr h.success('Got connection from %s:%d' % (self.rhost, self.rport)) self._accepter = context.Thread(target = accepter) self._accepter.daemon = True self._accepter.start() def _close_msg(self): self.info("Closed remote connection to %s:%d via SSH listener on port %d via %s" % (self.rhost, self.rport, self.port, self.host)) def spawn_process(self, *args, **kwargs): self.error("Cannot use spawn_process on an SSH channel.") def wait_for_connection(self): """Blocks until a
connection has been established.""" _ = self.sock return self def __getattr__(self, key): if key == 'sock': while self._accepter.is_alive(): self._accepter.join(timeout = 0.1) return self.sock else: return getattr(super(ssh_listener, self), key) class ssh(Timeout, Logger): #: Remote host name (``str``) host = None #: Remote port (``int``) port = None #: Enable caching of SSH downloads (``bool``) cache = True #: Paramiko SSHClient which backs this object client = None #: Paramiko SFTPClient object which is used for file transfers. #: Set to :const:`None` to disable ``sftp``. sftp = None #: PID of the remote ``sshd`` process servicing this connection. pid = None _cwd = '.' def __init__(self, user=None, host=None, port=22, password=None, key=None, keyfile=None, proxy_command=None, proxy_sock=None, level=None, cache=True, ssh_agent=False, *a, **kw): """Creates a new ssh connection. Arguments: user(str): The username to log in with host(str): The hostname to connect to port(int): The port to connect to password(str): Try to authenticate using this password key(str): Try to authenticate using this private key. The string should be the actual private key. keyfile(str): Try to authenticate using this private key. The string should be a filename. proxy_command(str): Use this as a proxy command. It has approximately the same semantics as ProxyCommand from ssh(1). proxy_sock(str): Use this socket instead of connecting to the host. timeout: Timeout, in seconds level: Log level cache: Cache downloaded files (by hash/size/timestamp) ssh_agent: If :const:`True`, enable usage of keys via ssh-agent NOTE: The proxy_command and proxy_sock arguments are only available if a fairly new version of paramiko is used. Example proxying: .. doctest:: :skipif: True >>> s1 = ssh(host='example.pwnme') >>> r1 = s1.remote('localhost', 22) >>> s2 = ssh(host='example.pwnme', proxy_sock=r1.sock) >>> r2 = s2.remote('localhost', 22) # and so on... >>> for x in r2, s2, r1, s1: x.close() """ super(ssh, self).__init__(*a, **kw) Logger.__init__(self) if level is not None: self.setLevel(level) self.host = host self.port = port self.user = user self.password = password self.key = key self.keyfile = keyfile self._cachedir = os.path.join(tempfile.gettempdir(), 'pwntools-ssh-cache') self.cache = cache # Deferred attributes self._platform_info = {} self._aslr = None self._aslr_ulimit = None misc.mkdir_p(self._cachedir) # This is a dirty hack to make my Yubikey shut up. # If anybody has a problem with this, please open a bug and I'll # figure out a better workaround.
if not ssh_agent: os.environ.pop('SSH_AUTH_SOCK', None) import paramiko # Make a basic attempt to parse the ssh_config file try: config_file = os.path.expanduser('~/.ssh/config') if os.path.exists(config_file): ssh_config = paramiko.SSHConfig() ssh_config.parse(open(config_file)) host_config = ssh_config.lookup(host) if 'hostname' in host_config: self.host = host = host_config['hostname'] if not user and 'user' in host_config: self.user = user = host_config['user'] if not keyfile and 'identityfile' in host_config: keyfile = host_config['identityfile'][0] if keyfile.lower() == 'none': keyfile = None except Exception as e: self.debug("An error occurred while parsing ~/.ssh/config:\n%s" % e) keyfiles = [os.path.expanduser(keyfile)] if keyfile else [] msg = 'Connecting to %s on port %d' % (host, port) with self.waitfor(msg) as h: self.client = paramiko.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) known_hosts = os.path.expanduser('~/.ssh/known_hosts') if os.path.exists(known_hosts): self.client.load_host_keys(known_hosts) has_proxy = bool(proxy_sock or proxy_command) if has_proxy: if 'ProxyCommand' not in dir(paramiko): self.error('This version of paramiko does not support proxies.') if proxy_sock and proxy_command: self.error('Cannot have both a proxy command and a proxy sock') if proxy_command: proxy_sock = paramiko.ProxyCommand(proxy_command) else: proxy_sock = None try: self.client.connect(host, port, user, password, key, keyfiles, self.timeout, compress = True, sock = proxy_sock) except paramiko.BadHostKeyException as e: self.error("Remote host %(host)s is using a different key than stated in known_hosts\n" " To remove the existing entry from your known_hosts and trust the new key, run the following commands:\n" " $ ssh-keygen -R %(host)s\n" " $ ssh-keygen -R [%(host)s]:%(port)s" % locals()) self.transport = self.client.get_transport() self.transport.use_compression(True) h.success() self._tried_sftp = False if self.sftp: with context.quiet: self.cwd = context._decode(self.pwd()) else: self.cwd = '.' with context.local(log_level='error'): def getppid(): print(os.getppid()) try: self.pid = int(self.process('false', preexec_fn=getppid).recvall()) except Exception: self.pid = None try: self.info_once(self.checksec()) except Exception: self.warn_once("Couldn't check security settings on %r" % self.host) def __repr__(self): return "{}(user={!r}, host={!r})".format(self.__class__.__name__, self.user, self.host) @property def cwd(self): return self._cwd @cwd.setter def cwd(self, cwd): self._cwd = cwd if self.sftp: self.sftp.chdir(cwd) @property def sftp(self): if not self._tried_sftp: try: self._sftp = self.transport.open_sftp_client() except Exception: self._sftp = None self._tried_sftp = True return self._sftp @sftp.setter def sftp(self, value): self._sftp = value self._tried_sftp = True def __enter__(self, *a): return self def __exit__(self, *a, **kw): self.close() def shell(self, shell = None, tty = True, timeout = Timeout.default): """shell(shell = None, tty = True, timeout = Timeout.default) -> ssh_channel Open a new channel with a shell inside. Arguments: shell(str): Path to the shell program to run. If :const:`None`, uses the default shell for the logged in user. tty(bool): If :const:`True`, then a TTY is requested on the remote server. Returns: Return a :class:`pwnlib.tubes.ssh.ssh_channel` object. 
Examples: >>> s = ssh(host='example.pwnme') >>> sh = s.shell('/bin/sh') >>> sh.sendline(b'echo Hello; exit') >>> print(b'Hello' in sh.recvall()) True """ return self.run(shell, tty, timeout = timeout) def process(self, argv=None, executable=None, tty=True, cwd=None, env=None, timeout=Timeout.default, run=True, stdin=0, stdout=1, stderr=2, preexec_fn=None, preexec_args=(), raw=True, aslr=None, setuid=None, shell=False): r""" Executes a process on the remote server, in the same fashion as pwnlib.tubes.process.process. To achieve this, a Python script is created to call ``os.execve`` with the appropriate arguments. As an added bonus, the ``ssh_channel`` object returned has a ``pid`` property for the process pid. Arguments: argv(list): List of arguments to pass into the process executable(str): Path to the executable to run. If :const:`None`, ``argv[0]`` is used. tty(bool): Request a `tty` from the server. This usually fixes buffering problems by causing `libc` to write data immediately rather than buffering it. However, this disables interpretation of control codes (e.g. Ctrl+C) and breaks `.shutdown`. cwd(str): Working directory. If :const:`None`, uses the working directory specified on :attr:`cwd` or set via :meth:`set_working_directory`. env(dict): Environment variables to set in the child. If :const:`None`, inherits the default environment. timeout(int): Timeout to set on the `tube` created to interact with the process. run(bool): Set to :const:`True` to run the program (default). If :const:`False`, returns the path to an executable Python script on the remote server which, when executed, will do it. stdin(int, str): If an integer, replace stdin with the numbered file descriptor. If a string, open a file with the specified path and replace stdin with its file descriptor. May also be one of ``sys.stdin``, ``sys.stdout``, ``sys.stderr``. If :const:`None`, the file descriptor is closed. stdout(int, str): See ``stdin``. stderr(int, str): See ``stdin``. preexec_fn(callable): Function which is executed on the remote side before execve(). This **MUST** be a self-contained function -- it must perform all of its own imports, and cannot refer to variables outside its scope. preexec_args(object): Argument passed to ``preexec_fn``. This **MUST** only consist of native Python objects. raw(bool): If :const:`True`, disable TTY control code interpretation. aslr(bool): See :class:`pwnlib.tubes.process.process` for more information. setuid(bool): See :class:`pwnlib.tubes.process.process` for more information. shell(bool): Pass the command-line arguments to the shell. Returns: A new SSH channel, or a path to a script if ``run=False``. Notes: Requires Python on the remote server.
Examples: >>> s = ssh(host='example.pwnme') >>> sh = s.process('/bin/sh', env={'PS1':''}) >>> sh.sendline(b'echo Hello; exit') >>> sh.recvall() b'Hello\n' >>> s.process(['/bin/echo', b'\xff']).recvall() b'\xff\n' >>> s.process(['readlink', '/proc/self/exe']).recvall() # doctest: +ELLIPSIS b'.../bin/readlink\n' >>> s.process(['LOLOLOL', '/proc/self/exe'], executable='readlink').recvall() # doctest: +ELLIPSIS b'.../bin/readlink\n' >>> s.process(['LOLOLOL\x00', '/proc/self/cmdline'], executable='cat').recvall() b'LOLOLOL\x00/proc/self/cmdline\x00' >>> sh = s.process(executable='/bin/sh') >>> str(sh.pid).encode() in s.pidof('sh') # doctest: +SKIP True >>> s.process(['pwd'], cwd='/tmp').recvall() b'/tmp\n' >>> p = s.process(['python','-c','import os; os.write(1, os.read(2, 1024))'], stderr=0) >>> p.send(b'hello') >>> p.recv() b'hello' >>> s.process(['/bin/echo', 'hello']).recvall() b'hello\n' >>> s.process(['/bin/echo', 'hello'], stdout='/dev/null').recvall() b'' >>> s.process(['/usr/bin/env'], env={}).recvall() b'' >>> s.process('/usr/bin/env', env={'A':'B'}).recvall() b'A=B\n' >>> s.process('false', preexec_fn=1234) Traceback (most recent call last): ... PwnlibException: preexec_fn must be a function >>> s.process('false', preexec_fn=lambda: 1234) Traceback (most recent call last): ... PwnlibException: preexec_fn cannot be a lambda >>> def uses_globals(): ... foo = bar >>> print(s.process('false', preexec_fn=uses_globals).recvall().strip().decode()) # doctest: +ELLIPSIS Traceback (most recent call last): ... NameError: ... name 'bar' is not defined >>> s.process('echo hello', shell=True).recvall() b'hello\n' >>> io = s.process(['cat'], timeout=5) >>> io.recvline() b'' """ if not argv and not executable: self.error("Must specify argv or executable") argv = argv or [] aslr = aslr if aslr is not None else context.aslr if isinstance(argv, (six.text_type, bytes, bytearray)): argv = [argv] if not isinstance(argv, (list, tuple)): self.error('argv must be a list or tuple') if not all(isinstance(arg, (six.text_type, bytes, bytearray)) for arg in argv): self.error("argv must be strings or bytes: %r" % argv) if shell: if len(argv) != 1: self.error('Cannot provide more than 1 argument if shell=True') argv = ['/bin/sh', '-c'] + argv # Create a duplicate so we can modify it argv = list(argv or []) # Python doesn't like when an arg in argv contains '\x00' # -> execve() arg 2 must contain only strings for i, oarg in enumerate(argv): if isinstance(oarg, six.text_type): arg = oarg.encode('utf-8') else: arg = oarg if b'\x00' in arg[:-1]: self.error('Inappropriate nulls in argv[%i]: %r' % (i, oarg)) argv[i] = bytearray(arg.rstrip(b'\x00')) if env is not None and not isinstance(env, dict) and env != os.environ: self.error("env must be a dict: %r" % env) # Converts the environment variables to a list of tuples to retain order. env2 = [] # Python also doesn't like when envp contains '\x00' if env and hasattr(env, 'items'): for k, v in env.items(): if isinstance(k, six.text_type): k = k.encode('utf-8') if isinstance(v, six.text_type): v = v.encode('utf-8') if b'\x00' in k[:-1]: self.error('Inappropriate nulls in environment key %r' % k) if b'\x00' in v[:-1]: self.error('Inappropriate nulls in environment value %r=%r' % (k, v)) env2.append((bytearray(k.rstrip(b'\x00')), bytearray(v.rstrip(b'\x00')))) env = env2 or env executable = executable or argv[0] cwd = cwd or self.cwd # Validate, since failures on the remote side will suck. 
if not isinstance(executable, (six.text_type, six.binary_type, bytearray)): self.error("executable / argv[0] must be a string: %r" % executable) executable = context._decode(executable) # Allow passing in sys.stdin/stdout/stderr objects handles = {sys.stdin: 0, sys.stdout:1, sys.stderr:2} stdin = handles.get(stdin, stdin) stdout = handles.get(stdout, stdout) stderr = handles.get(stderr, stderr) # Allow the user to provide a self-contained function to run def func(): pass func = preexec_fn or func func_args = preexec_args if not isinstance(func, types.FunctionType): self.error("preexec_fn must be a function") func_name = func.__name__ if func_name == (lambda: 0).__name__: self.error("preexec_fn cannot be a lambda") func_src = inspect.getsource(func).strip() setuid = True if setuid is None else bool(setuid) script = r""" #!/usr/bin/env python import os, sys, ctypes, resource, platform, stat from collections import OrderedDict try: integer_types = int, long except NameError: integer_types = int, exe = %(executable)r argv = [bytes(a) for a in %(argv)r] env = %(env)r os.chdir(%(cwd)r) if env is not None: env = OrderedDict((bytes(k), bytes(v)) for k,v in env) os.environ.clear() getattr(os, 'environb', os.environ).update(env) else: env = os.environ def is_exe(path): return os.path.isfile(path) and os.access(path, os.X_OK) PATH = os.environ.get('PATH','').split(os.pathsep) if os.path.sep not in exe and not is_exe(exe): for path in PATH: test_path = os.path.join(path, exe) if is_exe(test_path): exe = test_path break if not is_exe(exe): sys.stderr.write('3\n') sys.stderr.write("{} is not executable or does not exist in $PATH: {}".format(exe,PATH)) sys.exit(-1) if not %(setuid)r: PR_SET_NO_NEW_PRIVS = 38 result = ctypes.CDLL('libc.so.6').prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) if result != 0: sys.stdout.write('3\n') sys.stdout.write("Could not disable setuid: prctl(PR_SET_NO_NEW_PRIVS) failed") sys.exit(-1) try: PR_SET_PTRACER = 0x59616d61 PR_SET_PTRACER_ANY = -1 ctypes.CDLL('libc.so.6').prctl(PR_SET_PTRACER, PR_SET_PTRACER_ANY, 0, 0, 0) except Exception: pass # Determine what UID the process will execute as # This is used for locating apport core dumps suid = os.getuid() sgid = os.getgid() st = os.stat(exe) if %(setuid)r: if (st.st_mode & stat.S_ISUID): suid = st.st_uid if (st.st_mode & stat.S_ISGID): sgid = st.st_gid if sys.argv[-1] == 'check': sys.stdout.write("1\n") sys.stdout.write(str(os.getpid()) + "\n") sys.stdout.write(str(os.getuid()) + "\n") sys.stdout.write(str(os.getgid()) + "\n") sys.stdout.write(str(suid) + "\n") sys.stdout.write(str(sgid) + "\n") sys.stdout.write(os.path.realpath(exe) + '\x00') sys.stdout.flush() for fd, newfd in {0: %(stdin)r, 1: %(stdout)r, 2:%(stderr)r}.items(): if newfd is None: os.close(fd) elif isinstance(newfd, (str, bytes)): newfd = os.open(newfd, os.O_RDONLY if fd == 0 else (os.O_RDWR|os.O_CREAT)) os.dup2(newfd, fd) os.close(newfd) elif isinstance(newfd, integer_types) and newfd != fd: os.dup2(fd, newfd) if not %(aslr)r: if platform.system().lower() == 'linux' and %(setuid)r is not True: ADDR_NO_RANDOMIZE = 0x0040000 ctypes.CDLL('libc.so.6').personality(ADDR_NO_RANDOMIZE) resource.setrlimit(resource.RLIMIT_STACK, (-1, -1)) # Attempt to dump ALL core file regions try: with open('/proc/self/coredump_filter', 'w') as core_filter: core_filter.write('0x3f\n') except Exception: pass # Assume that the user would prefer to have core dumps. 
try: resource.setrlimit(resource.RLIMIT_CORE, (-1, -1)) except Exception: pass %(func_src)s %(func_name)s(*%(func_args)r) os.execve(exe, argv, env) """ % locals() script = script.strip() self.debug("Created execve script:\n" + script) if not run: with context.local(log_level='error'): tmpfile = self.mktemp('-t', 'pwnlib-execve-XXXXXXXXXX') self.chmod('+x', tmpfile) self.info("Uploading execve script to %r" % tmpfile) self.upload_data(script, tmpfile) return tmpfile if self.isEnabledFor(logging.DEBUG): execve_repr = "execve(%r, %s, %s)" % (executable, argv, 'os.environ' if (env in (None, os.environ)) else env) # Avoid spamming the screen if self.isEnabledFor(logging.DEBUG) and len(execve_repr) > 512: execve_repr = execve_repr[:512] + '...' else: execve_repr = repr(executable) msg = 'Starting remote process %s on %s' % (execve_repr, self.host) if timeout == Timeout.default: timeout = self.timeout with self.progress(msg) as h: script = 'echo PWNTOOLS; for py in python2.7 python2 python; do test -x "$(which $py 2>&1)" && echo $py && exec $py -c %s check; done; echo 2' % sh_string(script) with context.quiet: python = ssh_process(self, script, tty=True, raw=True, level=self.level, timeout=timeout) try: python.recvline_contains('PWNTOOLS') # Magic flag so that any sh/bash initialization errors are swallowed python.recvline() # Python interpreter that was selected result = safeeval.const(python.recvline()) # Status flag from the Python script except (EOFError, ValueError): h.failure("Process creation failed") self.warn_once('Could not find a Python interpreter on %s\n' % self.host \ + "Use ssh.run() instead of ssh.process()") return None # If an error occurred, try to grab as much output # as we can. if result != 1: error_message = python.recvrepeat(timeout=1) if result == 0: self.error("%r does not exist or is not executable" % executable) elif result == 3: self.error(error_message) elif result == 2: self.error("python is not installed on the remote system %r" % self.host) elif result != 1: h.failure("something bad happened:\n%s" % error_message) python.pid = safeeval.const(python.recvline()) python.uid = safeeval.const(python.recvline()) python.gid = safeeval.const(python.recvline()) python.suid = safeeval.const(python.recvline()) python.sgid = safeeval.const(python.recvline()) python.argv = argv python.executable = context._decode(python.recvuntil(b'\x00')[:-1]) h.success('pid %i' % python.pid) if not aslr and setuid and (python.uid != python.suid or python.gid != python.sgid): effect = "partial" if self.aslr_ulimit else "no" message = "Specified aslr=False on setuid binary %s\n" % python.executable message += "This will have %s effect. Add setuid=False to disable ASLR for debugging.\n" % effect if self.aslr_ulimit: message += "Unlimited stack size should de-randomize shared libraries." self.warn_once(message) elif not aslr: self.warn_once("ASLR is disabled for %r!" % python.executable) return python def which(self, program): """which(program) -> str Works like ``which`` on the remote system, with one minor modification: the current working directory is appended to the end of ``$PATH``. """ # If name is a path, do not attempt to resolve it.
if os.path.sep in program: return program result = self.run('export PATH=$PATH:$PWD; which %s' % program).recvall().strip().decode() if ('/%s' % program) not in result: return None return result def system(self, process, tty = True, wd = None, env = None, timeout = None, raw = True): r"""system(process, tty = True, wd = None, env = None, timeout = Timeout.default, raw = True) -> ssh_channel Open a new channel with a specific process inside. If `tty` is True, then a TTY is requested on the remote server. If `raw` is True, terminal control codes are ignored and input is not echoed back. Return a :class:`pwnlib.tubes.ssh.ssh_channel` object. Examples: >>> s = ssh(host='example.pwnme') >>> py = s.run('python -i') >>> _ = py.recvuntil(b'>>> ') >>> py.sendline(b'print(2+2)') >>> py.sendline(b'exit') >>> print(repr(py.recvline())) b'4\n' >>> s.system('env | grep -a AAAA', env={'AAAA': b'\x90'}).recvall() b'AAAA=\x90\n' """ if wd is None: wd = self.cwd if timeout is None: timeout = self.timeout return ssh_channel(self, process, tty, wd, env, timeout = timeout, level = self.level, raw = raw) #: Backward compatibility. Use :meth:`system` run = system def getenv(self, variable, **kwargs): """Retrieve the address of an environment variable on the remote system. Note: The exact address will differ based on what other environment variables are set, as well as argv[0]. In order to ensure that the path is *exactly* the same, it is recommended to invoke the process with ``argv=[]``. """ script = ''' from ctypes import *; libc = CDLL('libc.so.6'); print(libc.getenv(%r)) ''' % variable with context.local(log_level='error'): python = self.which('python') if not python: self.error("Python is not installed on the remote system.") io = self.process(['','-c', script.strip()], executable=python, **kwargs) result = io.recvall() try: return int(result) & context.mask except ValueError: self.exception("Could not look up environment variable %r" % variable) def run_to_end(self, process, tty = False, wd = None, env = None): r"""run_to_end(process, tty = False, wd = None, env = None) -> (bytes, int) Run a command on the remote server and return a tuple with (data, exit_status). If `tty` is True, then the command is run inside a TTY on the remote server. Examples: >>> s = ssh(host='example.pwnme') >>> print(s.run_to_end('echo Hello; exit 17')) (b'Hello\n', 17) """ with context.local(log_level = 'ERROR'): c = self.run(process, tty, wd = wd, timeout = Timeout.default) data = c.recvall() retcode = c.wait() c.close() return data, retcode def connect_remote(self, host, port, timeout = Timeout.default): r"""connect_remote(host, port, timeout = Timeout.default) -> ssh_connecter Connects to a host through an SSH connection. This is equivalent to using the ``-L`` flag on ``ssh``. Returns a :class:`pwnlib.tubes.ssh.ssh_connecter` object. Examples: >>> from pwn import * >>> l = listen() >>> s = ssh(host='example.pwnme') >>> a = s.connect_remote(s.host, l.lport) >>> a=a; b = l.wait_for_connection() # a=a; prevents hangs >>> a.sendline(b'Hello') >>> print(repr(b.recvline())) b'Hello\n' """ return ssh_connecter(self, host, port, timeout, level=self.level) remote = connect_remote def listen_remote(self, port = 0, bind_address = '', timeout = Timeout.default): r"""listen_remote(port = 0, bind_address = '', timeout = Timeout.default) -> ssh_listener Listens remotely through an SSH connection. This is equivalent to using the ``-R`` flag on ``ssh``. Returns a :class:`pwnlib.tubes.ssh.ssh_listener` object.
Examples: >>> from pwn import * >>> s = ssh(host='example.pwnme') >>> l = s.listen_remote() >>> a = remote(s.host, l.port) >>> a=a; b = l.wait_for_connection() # a=a; prevents hangs >>> a.sendline(b'Hello') >>> print(repr(b.recvline())) b'Hello\n' """ return ssh_listener(self, bind_address, port, timeout, level=self.level) listen = listen_remote def __getitem__(self, attr): """Permits indexed access to run commands over SSH Examples: >>> s = ssh(host='example.pwnme') >>> print(repr(s['echo hello'])) b'hello' """ return self.__getattr__(attr)() def __call__(self, attr): """Permits function-style access to run commands over SSH Examples: >>> s = ssh(host='example.pwnme') >>> print(repr(s('echo hello'))) b'hello' """ return self.__getattr__(attr)() def __getattr__(self, attr): """Permits member access to run commands over SSH Examples: >>> s = ssh(host='example.pwnme') >>> s.echo('hello') b'hello' >>> s.whoami() b'travis' >>> s.echo(['huh','yay','args']) b'huh yay args' """ bad_attrs = [ 'trait_names', # ipython tab-complete ] if attr in self.__dict__ \ or attr in bad_attrs \ or attr.startswith('_'): raise AttributeError @LocalContext def runner(*args): if len(args) == 1 and isinstance(args[0], (list, tuple)): command = [attr] + args[0] else: command = ' '.join((attr,) + tuple(map(six.ensure_str, args))) return self.run(command).recvall().strip() return runner def connected(self): """Returns True if we are connected. Example: >>> s = ssh(host='example.pwnme') >>> s.connected() True >>> s.close() >>> s.connected() False """ return bool(self.client and self.client.get_transport().is_active()) def close(self): """Close the connection.""" if self.client: self.client.close() self.client = None self.info("Closed connection to %r" % self.host) def _libs_remote(self, remote): """Return a dictionary of the libraries used by a remote file.""" escaped_remote = sh_string(remote) cmd = ''.join([ '(', 'ulimit -s unlimited;', 'ldd %s > /dev/null &&' % escaped_remote, '(', 'LD_TRACE_LOADED_OBJECTS=1 %s||' % escaped_remote, 'ldd %s' % escaped_remote, '))', ' 2>/dev/null' ]) data, status = self.run_to_end(cmd) if status != 0: self.error('Unable to find libraries for %r' % remote) return {} return misc.parse_ldd_output(context._decode(data)) def _get_fingerprint(self, remote): cmd = '(sha256 || sha256sum || openssl sha256) 2>/dev/null < ' cmd = cmd + sh_string(remote) data, status = self.run_to_end(cmd) if status != 0: return None # OpenSSL outputs in the format of... # (stdin)= e3b0c4429... data = data.replace(b'(stdin)= ',b'') # sha256 and sha256sum outputs in the format of... # e3b0c442... 
- data = data.replace(b'-',b'').strip() if not isinstance(data, str): data = data.decode('ascii') return data def _get_cachefile(self, fingerprint): return os.path.join(self._cachedir, fingerprint) def _verify_local_fingerprint(self, fingerprint): if not set(fingerprint).issubset(string.hexdigits) or \ len(fingerprint) != 64: self.error('Invalid fingerprint %r' % fingerprint) return False local = self._get_cachefile(fingerprint) if not os.path.isfile(local): return False if hashes.sha256filehex(local) == fingerprint: return True else: os.unlink(local) return False def _download_raw(self, remote, local, h): def update(has, total): h.status("%s/%s" % (misc.size(has), misc.size(total))) if self.sftp: try: self.sftp.get(remote, local, update) return except IOError: pass cmd = 'wc -c < ' + sh_string(remote) total, exitcode = self.run_to_end(cmd) if exitcode != 0: h.failure("%r does not exist or is not accessible" % remote) return total = int(total) with context.local(log_level = 'ERROR'): cmd = 'cat < ' + sh_string(remote) c = self.run(cmd) data = b'' while True: try: data += c.recv() except EOFError: break update(len(data), total) result = c.wait() if result != 0: h.failure('Could not download file %r (%r)' % (remote, result)) return with open(local, 'wb') as fd: fd.write(data) def _download_to_cache(self, remote, p): with context.local(log_level='error'): remote = self.readlink('-f',remote) if not hasattr(remote, 'encode'): remote = remote.decode('utf-8') fingerprint = self._get_fingerprint(remote) if fingerprint is None: local = os.path.normpath(remote) local = os.path.basename(local) local += time.strftime('-%Y-%m-%d-%H:%M:%S') local = os.path.join(self._cachedir, local) self._download_raw(remote, local, p) return local local = self._get_cachefile(fingerprint) if self.cache and self._verify_local_fingerprint(fingerprint): p.success('Found %r in ssh cache' % remote) else: self._download_raw(remote, local, p) if not self._verify_local_fingerprint(fingerprint): p.error('Could not download file %r' % remote) return local def download_data(self, remote): """Downloads a file from the remote server and returns it as a string. Arguments: remote(str): The remote filename to download. Examples: >>> with open('/tmp/bar','w+') as f: ... _ = f.write('Hello, world') >>> s = ssh(host='example.pwnme', ... cache=False) >>> s.download_data('/tmp/bar') b'Hello, world' >>> s._sftp = None >>> s._tried_sftp = True >>> s.download_data('/tmp/bar') b'Hello, world' """ with self.progress('Downloading %r' % remote) as p: with open(self._download_to_cache(remote, p), 'rb') as fd: return fd.read() def download_file(self, remote, local = None): """Downloads a file from the remote server. The file is cached in /tmp/pwntools-ssh-cache using a hash of the file, so calling the function twice has little overhead. Arguments: remote(str): The remote filename to download local(str): The local filename to save it to. Default is to infer it from the remote filename. 
""" if not local: local = os.path.basename(os.path.normpath(remote)) if os.path.basename(remote) == remote: remote = os.path.join(self.cwd, remote) with self.progress('Downloading %r to %r' % (remote, local)) as p: local_tmp = self._download_to_cache(remote, p) # Check to see if an identical copy of the file already exists if not os.path.exists(local) or hashes.sha256filehex(local_tmp) != hashes.sha256filehex(local): shutil.copy2(local_tmp, local) def download_dir(self, remote=None, local=None): """Recursively downloads a directory from the remote server Arguments: local: Local directory remote: Remote directory """ remote = remote or self.cwd if self.sftp: remote = str(self.sftp.normalize(remote)) else: with context.local(log_level='error'): remote = self.system('readlink -f ' + sh_string(remote)) basename = os.path.basename(remote) local = local or '.' local = os.path.expanduser(local) self.info("Downloading %r to %r" % (basename,local)) with context.local(log_level='error'): remote_tar = self.mktemp() cmd = 'tar -C %s -czf %s %s' % \ (sh_string(remote), sh_string(remote_tar), sh_string(basename)) tar = self.system(cmd) if 0 != tar.wait(): self.error("Could not create remote tar") local_tar = tempfile.NamedTemporaryFile(suffix='.tar.gz') self.download_file(remote_tar, local_tar.name) tar = tarfile.open(local_tar.name) tar.extractall(local) def upload_data(self, data, remote): """Uploads some data into a file on the remote server. Arguments: data(str): The data to upload. remote(str): The filename to upload it to. Example: >>> s = ssh(host='example.pwnme') >>> s.upload_data(b'Hello, world', '/tmp/upload_foo') >>> print(open('/tmp/upload_foo').read()) Hello, world >>> s._sftp = False >>> s._tried_sftp = True >>> s.upload_data(b'Hello, world', '/tmp/upload_bar') >>> print(open('/tmp/upload_bar').read()) Hello, world """ data = context._encode(data) # If a relative path was provided, prepend the cwd if os.path.normpath(remote) == os.path.basename(remote): remote = os.path.join(self.cwd, remote) if self.sftp: with tempfile.NamedTemporaryFile() as f: f.write(data) f.flush() self.sftp.put(f.name, remote) return with context.local(log_level = 'ERROR'): cmd = 'cat > ' + sh_string(remote) s = self.run(cmd, tty=False) s.send(data) s.shutdown('send') data = s.recvall() result = s.wait() if result != 0: self.error("Could not upload file %r (%r)\n%s" % (remote, result, data)) def upload_file(self, filename, remote = None): """Uploads a file to the remote server. Returns the remote filename. Arguments: filename(str): The local filename to download remote(str): The remote filename to save it to. 
Default is to infer it from the local filename.""" if remote is None: remote = os.path.normpath(filename) remote = os.path.basename(remote) remote = os.path.join(self.cwd, remote) with open(filename, 'rb') as fd: data = fd.read() self.info("Uploading %r to %r" % (filename,remote)) self.upload_data(data, remote) return remote def upload_dir(self, local, remote=None): """Recursively uploads a directory onto the remote server Arguments: local: Local directory remote: Remote directory """ remote = remote or self.cwd local = os.path.expanduser(local) dirname = os.path.dirname(local) basename = os.path.basename(local) if not os.path.isdir(local): self.error("%r is not a directory" % local) msg = "Uploading %r to %r" % (basename,remote) with self.waitfor(msg): # Generate a tarfile with everything inside of it local_tar = tempfile.mktemp() with tarfile.open(local_tar, 'w:gz') as tar: tar.add(local, basename) # Upload and extract it with context.local(log_level='error'): remote_tar = self.mktemp('--suffix=.tar.gz') self.upload_file(local_tar, remote_tar) untar = self.run('cd %s && tar -xzf %s' % (remote, remote_tar)) message = untar.recvrepeat(2) if untar.wait() != 0: self.error("Could not untar %r on the remote end\n%s" % (remote_tar, message)) def upload(self, file_or_directory, remote=None): """upload(file_or_directory, remote=None) Upload a file or directory to the remote host. Arguments: file_or_directory(str): Path to the file or directory to download. remote(str): Local path to store the data. By default, uses the working directory. """ if isinstance(file_or_directory, str): file_or_directory = os.path.expanduser(file_or_directory) file_or_directory = os.path.expandvars(file_or_directory) if os.path.isfile(file_or_directory): return self.upload_file(file_or_directory, remote) if os.path.isdir(file_or_directory): return self.upload_dir(file_or_directory, remote) self.error('%r does not exist' % file_or_directory) def download(self, file_or_directory, local=None): """download(file_or_directory, local=None) Download a file or directory from the remote host. Arguments: file_or_directory(str): Path to the file or directory to download. local(str): Local path to store the data. By default, uses the current directory. """ if not self.sftp: self.error("Cannot determine remote file type without SFTP") with self.system('test -d ' + sh_string(file_or_directory)) as io: is_dir = io.wait() if 0 == is_dir: self.download_dir(file_or_directory, local) else: self.download_file(file_or_directory, local) put = upload get = download def unlink(self, file): """unlink(file) Delete the file on the remote host Arguments: file(str): Path to the file """ if not self.sftp: self.error("unlink() is only supported if SFTP is supported") return self.sftp.unlink(file) def libs(self, remote, directory = None): """Downloads the libraries referred to by a file. This is done by running ldd on the remote server, parsing the output and downloading the relevant files. The directory argument specified where to download the files. This defaults to './$HOSTNAME' where $HOSTNAME is the hostname of the remote server.""" libs = self._libs_remote(remote) remote = context._decode(self.readlink('-f',remote).strip()) libs[remote] = 0 if directory is None: directory = self.host directory = os.path.realpath(directory) res = {} seen = set() for lib, addr in libs.items(): local = os.path.realpath(os.path.join(directory, '.' 
+ os.path.sep + lib)) if not local.startswith(directory): self.warning('This seems fishy: %r' % lib) continue misc.mkdir_p(os.path.dirname(local)) if lib not in seen: self.download_file(lib, local) seen.add(lib) res[local] = addr return res def interactive(self, shell=None): """Create an interactive session. This is a simple wrapper for creating a new :class:`pwnlib.tubes.ssh.ssh_channel` object and calling :meth:`pwnlib.tubes.ssh.ssh_channel.interactive` on it.""" s = self.shell(shell) if self.cwd != '.': cmd = 'cd ' + sh_string(self.cwd) s.sendline(cmd) s.interactive() s.close() def set_working_directory(self, wd = None, symlink = False): """Sets the working directory in which future commands will be run (via ssh.run) and to which files will be uploaded/downloaded from if no path is provided Note: This uses ``mktemp -d`` under the covers, sets permissions on the directory to ``0700``. This means that setuid binaries will **not** be able to access files created in this directory. In order to work around this, we also ``chmod +x`` the directory. Arguments: wd(string): Working directory. Default is to auto-generate a directory based on the result of running 'mktemp -d' on the remote machine. symlink(bool,str): Create symlinks in the new directory. The default value, ``False``, implies that no symlinks should be created. A string value is treated as a path that should be symlinked. It is passed directly to the shell on the remote end for expansion, so wildcards work. Any other value is treated as a boolean, where ``True`` indicates that all files in the "old" working directory should be symlinked. Examples: >>> s = ssh(host='example.pwnme') >>> cwd = s.set_working_directory() >>> s.ls() b'' >>> context._decode(s.pwd()) == cwd True >>> s = ssh(host='example.pwnme') >>> homedir = s.pwd() >>> _=s.touch('foo') >>> _=s.set_working_directory() >>> assert s.ls() == b'' >>> _=s.set_working_directory(homedir) >>> assert b'foo' in s.ls().split() >>> _=s.set_working_directory(symlink=True) >>> assert b'foo' in s.ls().split() >>> assert homedir != s.pwd() >>> symlink=os.path.join(homedir,b'*') >>> _=s.set_working_directory(symlink=symlink) >>> assert b'foo' in s.ls().split() >>> assert homedir != s.pwd() """ status = 0 if symlink and not isinstance(symlink, (six.binary_type, six.text_type)): symlink = os.path.join(self.pwd(), b'*') if not hasattr(symlink, 'encode') and hasattr(symlink, 'decode'): symlink = symlink.decode('utf-8') if not wd: wd, status = self.run_to_end('x=$(mktemp -d) && cd $x && chmod +x . 
&& echo $PWD', wd='.') wd = wd.strip() if status: self.error("Could not generate a temporary directory (%i)\n%s" % (status, wd)) else: cmd = b'ls ' + sh_string(wd) _, status = self.run_to_end(cmd, wd = '.') if status: self.error("%r does not appear to exist" % wd) if not isinstance(wd, str): wd = wd.decode('utf-8') self.cwd = wd self.info("Working directory: %r" % self.cwd) if symlink: self.ln('-s', symlink, '.') return wd def write(self, path, data): """Wrapper around upload_data to match :func:`pwnlib.util.misc.write`""" return self.upload_data(data, path) def read(self, path): """Wrapper around download_data to match :func:`pwnlib.util.misc.read`""" return self.download_data(path) def _init_remote_platform_info(self): r"""Fills _platform_info, e.g.: :: {'distro': 'Ubuntu\n', 'distro_ver': '14.04\n', 'machine': 'x86_64', 'node': 'pwnable.kr', 'processor': 'x86_64', 'release': '3.11.0-12-generic', 'system': 'linux', 'version': '#19-ubuntu smp wed oct 9 16:20:46 utc 2013'} """ if self._platform_info: return def preexec(): import platform print('\n'.join(platform.uname())) with context.quiet: with self.process('true', preexec_fn=preexec) as io: self._platform_info = { 'system': io.recvline().lower().strip().decode(), 'node': io.recvline().lower().strip().decode(), 'release': io.recvline().lower().strip().decode(), 'version': io.recvline().lower().strip().decode(), 'machine': io.recvline().lower().strip().decode(), 'processor': io.recvline().lower().strip().decode(), 'distro': 'Unknown', 'distro_ver': '' } try: if not self.which('lsb_release'): return with self.process(['lsb_release', '-irs']) as io: lsb_info = io.recvall().strip().decode() self._platform_info['distro'], self._platform_info['distro_ver'] = lsb_info.split() except Exception: pass @property def os(self): """:class:`str`: Operating System of the remote machine.""" try: self._init_remote_platform_info() with context.local(os=self._platform_info['system']): return context.os except Exception: return "Unknown" @property def arch(self): """:class:`str`: CPU Architecture of the remote machine.""" try: self._init_remote_platform_info() with context.local(arch=self._platform_info['machine']): return context.arch except Exception: return "Unknown" @property def bits(self): """:class:`str`: Pointer size of the remote machine.""" try: with context.local(): context.clear() context.arch = self.arch return context.bits except Exception: return context.bits @property def version(self): """:class:`tuple`: Kernel version of the remote machine.""" try: self._init_remote_platform_info() vers = self._platform_info['release'] # 3.11.0-12-generic expr = r'([0-9]+\.?)+' vers = re.search(expr, vers).group() return tuple(map(int, vers.split('.'))) except Exception: return (0,0,0) @property def distro(self): """:class:`tuple`: Linux distribution name and release.""" try: self._init_remote_platform_info() return (self._platform_info['distro'], self._platform_info['distro_ver']) except Exception: return ("Unknown", "Unknown") @property def aslr(self): """:class:`bool`: Whether ASLR is enabled on the system. 
Example: >>> s = ssh("travis", "example.pwnme") >>> s.aslr True """ if self._aslr is None: if self.os != 'linux': self.warn_once("Only Linux is supported for ASLR checks.") self._aslr = False else: with context.quiet: rvs = self.read('/proc/sys/kernel/randomize_va_space') self._aslr = not rvs.startswith(b'0') return self._aslr @property def aslr_ulimit(self): """:class:`bool`: Whether the entropy of 32-bit processes can be reduced with ulimit.""" import pwnlib.elf.elf import pwnlib.shellcraft if self._aslr_ulimit is not None: return self._aslr_ulimit # This test must run a 32-bit binary, fix the architecture arch = { 'amd64': 'i386', 'aarch64': 'arm' }.get(self.arch, self.arch) with context.local(arch=arch, bits=32, os=self.os, aslr=True): with context.quiet: try: sc = pwnlib.shellcraft.cat('/proc/self/maps') \ + pwnlib.shellcraft.exit(0) elf = pwnlib.elf.elf.ELF.from_assembly(sc, shared=True) except Exception: self.warn_once("Can't determine ulimit ASLR status") self._aslr_ulimit = False return self._aslr_ulimit def preexec(): import resource try: resource.setrlimit(resource.RLIMIT_STACK, (-1, -1)) except Exception: pass # Move to a new temporary directory cwd = self.cwd tmp = self.set_working_directory() try: self.upload(elf.path, './aslr-test') except IOError: self.warn_once("Couldn't check ASLR ulimit trick") self._aslr_ulimit = False return False self.process(['chmod', '+x', './aslr-test']).wait() maps = self.process(['./aslr-test'], preexec_fn=preexec).recvall() # Move back to the old directory self.cwd = cwd # Clean up the files self.process(['rm', '-rf', tmp]).wait() # Check for 555555000 (1/3 of the address space for PAE) # and for 40000000 (1/3 of the address space with 3BG barrier) self._aslr_ulimit = bool(b'55555000' in maps or b'40000000' in maps) return self._aslr_ulimit def _checksec_cache(self, value=None): path = self._get_cachefile('%s-%s' % (self.host, self.port)) if value is not None: with open(path, 'w+') as f: f.write(value) elif os.path.exists(path): with open(path, 'r+') as f: return f.read() def checksec(self, banner=True): """checksec() Prints a helpful message about the remote system. Arguments: banner(bool): Whether to print the path to the ELF binary. """ cached = self._checksec_cache() if cached: return cached red = text.red green = text.green yellow = text.yellow res = [ "%s@%s:" % (self.user, self.host), "Distro".ljust(10) + ' '.join(self.distro), "OS:".ljust(10) + self.os, "Arch:".ljust(10) + self.arch, "Version:".ljust(10) + '.'.join(map(str, self.version)), "ASLR:".ljust(10) + { True: green("Enabled"), False: red("Disabled") }[self.aslr] ] if self.aslr_ulimit: res += [ "Note:".ljust(10) + red("Susceptible to ASLR ulimit trick (CVE-2016-3672)")] cached = '\n'.join(res) self._checksec_cache(cached) return cached
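
# ---------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the original module).
# A minimal end-to-end session with the `ssh` class defined above.  The host,
# user, and filenames are hypothetical placeholders -- substitute your own.
def _example_ssh_session():  # pragma: no cover
    s = ssh(host='example.pwnme', user='travis')

    # Run a one-shot command and collect (output, exit status).
    data, status = s.run_to_end('echo hello; exit 3')
    assert data.strip() == b'hello' and status == 3

    # Upload some bytes relative to the working directory, then read them back.
    s.upload_data(b'payload', 'note.txt')
    assert s.download_data('note.txt') == b'payload'

    s.close()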
transaction.py
#!/usr/bin/env python3 # # Electrum - lightweight Bitcoin client # Copyright (C) 2011 Thomas Voegtlin # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation files # (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, merge, # publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Note: The deserialization code originally comes from ABE. from .util import print_error, profiler from .caches import ExpiringCache from .bitcoin import * from .address import (PublicKey, Address, Script, ScriptOutput, hash160, UnknownAddress, OpCodes as opcodes) from . import schnorr from . import util from . import ecc_fast import struct import warnings from typing import NamedTuple, List, Callable,Union # # Workalike python implementation of Bitcoin's CDataStream class. # from .keystore import xpubkey_to_address, xpubkey_to_pubkey NO_SIGNATURE = 'ff' class SerializationError(Exception): """ Thrown when there's a problem deserializing or serializing """ class InputValueMissing(Exception): """ thrown when the value of an input is needed but not present """ class BCDataStream(object): def __init__(self): self.input = None self.read_cursor = 0 def clear(self): self.input = None self.read_cursor = 0 def write(self, _bytes): # Initialize with string of _bytes if self.input is None: self.input = bytearray(_bytes) else: self.input += bytearray(_bytes) def read_string(self, encoding='ascii'): # Strings are encoded depending on length: # 0 to 252 : 1-byte-length followed by bytes (if any) # 253 to 65,535 : byte'253' 2-byte-length followed by bytes # 65,536 to 4,294,967,295 : byte '254' 4-byte-length followed by bytes # ... and the Bitcoin client is coded to understand: # greater than 4,294,967,295 : byte '255' 8-byte-length followed by bytes of string # ... but I don't think it actually handles any strings that big. 
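        # Worked examples (added for illustration), matching read_compact_size()
        # further down:
        #   b'\x05hello'                  -> length 5, payload b'hello'
        #   b'\xfd\x00\x01' + 256 bytes   -> marker 253, 2-byte LE length 0x0100
        #   b'\xfe\x00\x00\x01\x00' + ... -> marker 254, 4-byte LE length 0x00010000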
if self.input is None: raise SerializationError("call write(bytes) before trying to deserialize") length = self.read_compact_size() return self.read_bytes(length).decode(encoding) def write_string(self, string, encoding='ascii'): string = to_bytes(string, encoding) # Length-encoded as with read-string self.write_compact_size(len(string)) self.write(string) def read_bytes(self, length): try: result = self.input[self.read_cursor:self.read_cursor+length] self.read_cursor += length return result except IndexError: raise SerializationError("attempt to read past end of buffer") return '' def can_read_more(self) -> bool: if not self.input: return False return self.read_cursor < len(self.input) def read_boolean(self): return self.read_bytes(1)[0] != chr(0) def read_int16(self): return self._read_num('<h') def read_uint16(self): return self._read_num('<H') def read_int32(self): return self._read_num('<i') def read_uint32(self): return self._read_num('<I') def read_int64(self): return self._read_num('<q') def read_uint64(self): return self._read_num('<Q') def write_boolean(self, val): return self.write(chr(1) if val else chr(0)) def write_int16(self, val): return self._write_num('<h', val) def write_uint16(self, val): return self._write_num('<H', val) def write_int32(self, val): return self._write_num('<i', val) def write_uint32(self, val): return self._write_num('<I', val) def write_int64(self, val): return self._write_num('<q', val) def write_uint64(self, val): return self._write_num('<Q', val) def read_compact_size(self): try: size = self.input[self.read_cursor] self.read_cursor += 1 if size == 253: size = self._read_num('<H') elif size == 254: size = self._read_num('<I') elif size == 255: size = self._read_num('<Q') return size except IndexError: raise SerializationError("attempt to read past end of buffer") def write_compact_size(self, size): if size < 0: raise SerializationError("attempt to write size < 0") elif size < 253: self.write(bytes([size])) elif size < 2**16: self.write(b'\xfd') self._write_num('<H', size) elif size < 2**32: self.write(b'\xfe') self._write_num('<I', size) elif size < 2**64: self.write(b'\xff') self._write_num('<Q', size) def _read_num(self, format): try: (i,) = struct.unpack_from(format, self.input, self.read_cursor) self.read_cursor += struct.calcsize(format) except Exception as e: raise SerializationError(e) return i def _write_num(self, format, num): s = struct.pack(format, num) self.write(s) # This function comes from bitcointools, bct-LICENSE.txt. def long_hex(bytes): return bytes.encode('hex_codec') # This function comes from bitcointools, bct-LICENSE.txt. def short_hex(bytes): t = bytes.encode('hex_codec') if len(t) < 11: return t return t[0:4]+"..."+t[-4:] def match_decoded(decoded, to_match): if len(decoded) != len(to_match): return False; for i in range(len(decoded)): if to_match[i] == opcodes.OP_PUSHDATA4 and decoded[i][0] <= opcodes.OP_PUSHDATA4 and decoded[i][0]>0: continue # Opcodes below OP_PUSHDATA4 all just push data onto stack, and are equivalent. 
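        # Clarification (added): a template slot of OP_PUSHDATA4 acts as a
        # wildcard for *any* data push -- every opcode in (0, OP_PUSHDATA4]
        # pushes bytes onto the stack -- so e.g. a P2PKH output matches
        # [OP_DUP, OP_HASH160, OP_PUSHDATA4, OP_EQUALVERIFY, OP_CHECKSIG]
        # regardless of which push opcode carried the 20-byte hash.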
if to_match[i] != decoded[i][0]: return False return True def parse_sig(x_sig): return [None if x == NO_SIGNATURE else x for x in x_sig] def safe_parse_pubkey(x): try: return xpubkey_to_pubkey(x) except: return x def parse_scriptSig(d, _bytes): try: decoded = Script.get_ops(_bytes) except Exception as e: # coinbase transactions raise an exception print_error("cannot find address in input script", bh2u(_bytes)) return match = [ opcodes.OP_PUSHDATA4 ] if match_decoded(decoded, match): item = decoded[0][1] # payto_pubkey d['type'] = 'p2pk' d['signatures'] = [bh2u(item)] d['num_sig'] = 1 d['x_pubkeys'] = ["(pubkey)"] d['pubkeys'] = ["(pubkey)"] return # non-generated TxIn transactions push a signature # (seventy-something bytes) and then their public key # (65 bytes) onto the stack: match = [ opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4 ] if match_decoded(decoded, match): sig = bh2u(decoded[0][1]) x_pubkey = bh2u(decoded[1][1]) try: signatures = parse_sig([sig]) pubkey, address = xpubkey_to_address(x_pubkey) except: print_error("cannot find address in input script", bh2u(_bytes)) return d['type'] = 'p2pkh' d['signatures'] = signatures d['x_pubkeys'] = [x_pubkey] d['num_sig'] = 1 d['pubkeys'] = [pubkey] d['address'] = address return # p2sh transaction, m of n match = [ opcodes.OP_0 ] + [ opcodes.OP_PUSHDATA4 ] * (len(decoded) - 1) if not match_decoded(decoded, match): print_error("cannot find address in input script", bh2u(_bytes)) return x_sig = [bh2u(x[1]) for x in decoded[1:-1]] m, n, x_pubkeys, pubkeys, redeemScript = parse_redeemScript(decoded[-1][1]) # write result in d d['type'] = 'p2sh' d['num_sig'] = m d['signatures'] = parse_sig(x_sig) d['x_pubkeys'] = x_pubkeys d['pubkeys'] = pubkeys d['redeemScript'] = redeemScript d['address'] = Address.from_P2SH_hash(hash160(redeemScript)) def parse_redeemScript(s): dec2 = Script.get_ops(s) # the following throw exception when redeemscript has one or zero opcodes m = dec2[0][0] - opcodes.OP_1 + 1 n = dec2[-2][0] - opcodes.OP_1 + 1 op_m = opcodes.OP_1 + m - 1 op_n = opcodes.OP_1 + n - 1 match_multisig = [ op_m ] + [opcodes.OP_PUSHDATA4]*n + [ op_n, opcodes.OP_CHECKMULTISIG ] if not match_decoded(dec2, match_multisig): # causes exception in caller when mismatched print_error("cannot find address in input script", bh2u(s)) return x_pubkeys = [bh2u(x[1]) for x in dec2[1:-2]] pubkeys = [safe_parse_pubkey(x) for x in x_pubkeys] redeemScript = Script.multisig_script(m, [bytes.fromhex(p) for p in pubkeys]) return m, n, x_pubkeys, pubkeys, redeemScript def get_address_from_output_script(_bytes): try: decoded = Script.get_ops(_bytes) # The Genesis Block, self-payments, and pay-by-IP-address payments look like: # 65 BYTES:... CHECKSIG match = [ opcodes.OP_PUSHDATA4, opcodes.OP_CHECKSIG ] if match_decoded(decoded, match): return TYPE_PUBKEY, PublicKey.from_pubkey(decoded[0][1]) # Pay-by-Bitcoin-address TxOuts look like: # DUP HASH160 20 BYTES:... EQUALVERIFY CHECKSIG match = [ opcodes.OP_DUP, opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG ] if match_decoded(decoded, match): return TYPE_ADDRESS, Address.from_P2PKH_hash(decoded[2][1]) # p2sh match = [ opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUAL ] if match_decoded(decoded, match): return TYPE_ADDRESS, Address.from_P2SH_hash(decoded[1][1]) except Exception as e: print_error('{}: Failed to parse tx ouptut {}. 
Exception was: {}'.format(__name__, _bytes.hex(), repr(e))) pass return TYPE_SCRIPT, ScriptOutput.protocol_factory(bytes(_bytes)) def parse_input(vds): d = {} prevout_hash = hash_encode(vds.read_bytes(32)) prevout_n = vds.read_uint32() scriptSig = vds.read_bytes(vds.read_compact_size()) sequence = vds.read_uint32() d['prevout_hash'] = prevout_hash d['prevout_n'] = prevout_n d['sequence'] = sequence d['address'] = UnknownAddress() if prevout_hash == '00'*32: d['type'] = 'coinbase' d['scriptSig'] = bh2u(scriptSig) else: d['x_pubkeys'] = [] d['pubkeys'] = [] d['signatures'] = {} d['address'] = None d['type'] = 'unknown' d['num_sig'] = 0 d['scriptSig'] = bh2u(scriptSig) try: parse_scriptSig(d, scriptSig) except Exception as e: print_error('{}: Failed to parse tx input {}:{}, probably a p2sh (non multisig?). Exception was: {}'.format(__name__, prevout_hash, prevout_n, repr(e))) # that whole heuristic codepath is fragile; just ignore it when it dies. # failing tx examples: # 1c671eb25a20aaff28b2fa4254003c201155b54c73ac7cf9c309d835deed85ee # 08e1026eaf044127d7103415570afd564dfac3131d7a5e4b645f591cd349bb2c # override these once more just to make sure d['address'] = UnknownAddress() d['type'] = 'unknown' if not Transaction.is_txin_complete(d): d['value'] = vds.read_uint64() return d def parse_output(vds, i): d = {} d['value'] = vds.read_int64() scriptPubKey = vds.read_bytes(vds.read_compact_size()) d['type'], d['address'] = get_address_from_output_script(scriptPubKey) d['scriptPubKey'] = bh2u(scriptPubKey) d['prevout_n'] = i return d def deserialize(raw): vds = BCDataStream() vds.write(bfh(raw)) d = {} start = vds.read_cursor d['version'] = vds.read_int32() n_vin = vds.read_compact_size() d['inputs'] = [parse_input(vds) for i in range(n_vin)] n_vout = vds.read_compact_size() d['outputs'] = [parse_output(vds, i) for i in range(n_vout)] d['lockTime'] = vds.read_uint32() if vds.can_read_more(): raise SerializationError('extra junk at the end') return d # pay & redeem scripts def multisig_script(public_keys, m): n = len(public_keys) assert n <= 15 assert m <= n op_m = format(opcodes.OP_1 + m - 1, 'x') op_n = format(opcodes.OP_1 + n - 1, 'x') keylist = [op_push(len(k)//2) + k for k in public_keys] return op_m + ''.join(keylist) + op_n + 'ae' class TxOutput(NamedTuple): type: int address: str value: Union[int, str] # str when the output is set to max: '!' class Transaction: SIGHASH_FORKID = 0x40 # do not use this; deprecated FORKID = 0x000000 # do not use this; deprecated def __str__(self): if self.raw is None: self.raw = self.serialize() return self.raw def __init__(self, raw, sign_schnorr=False): if raw is None: self.raw = None elif isinstance(raw, str): self.raw = raw.strip() if raw else None elif isinstance(raw, dict): self.raw = raw['hex'] else: raise BaseException("cannot initialize transaction", raw) self._inputs = None self._outputs = None self.locktime = 0 self.version = 1 self._sign_schnorr = sign_schnorr # attribute used by HW wallets to tell the hw keystore about any outputs # in the tx that are to self (change), etc. See wallet.py add_hw_info # which writes to this dict and the various hw wallet plugins which # read this dict. self.output_info = dict() # Ephemeral meta-data used internally to keep track of interesting # things. This is currently written-to by coinchooser to tell UI code # about 'dust_to_fee', which is change that's too small to go to change # outputs (below dust threshold) and needed to go to the fee. 
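        # Illustrative shape of this dict (added for orientation; all keys are
        # optional and advisory, values below are made up):
        #   self.ephemeral = {'dust_to_fee': 546,        # sats folded into the fee
        #                     'fetched_inputs': [...],   # see fetch_input_data()
        #                     '_fetch': thread_object}   # in-flight fetch marker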
# # It is also used to store the 'fetched_inputs' which are asynchronously # retrieved inputs (by retrieving prevout_hash tx's), see #`fetch_input_data`. # # Values in this dict are advisory only and may or may not always be # there! self.ephemeral = dict() def set_sign_schnorr(self, b): self._sign_schnorr = b def update(self, raw): self.raw = raw self._inputs = None self.deserialize() def inputs(self): if self._inputs is None: self.deserialize() return self._inputs def outputs(self): if self._outputs is None: self.deserialize() return self._outputs @classmethod def get_sorted_pubkeys(self, txin): # sort pubkeys and x_pubkeys, using the order of pubkeys if txin['type'] == 'coinbase': return [], [] x_pubkeys = txin['x_pubkeys'] pubkeys = txin.get('pubkeys') if pubkeys is None: pubkeys = [xpubkey_to_pubkey(x) for x in x_pubkeys] pubkeys, x_pubkeys = zip(*sorted(zip(pubkeys, x_pubkeys))) txin['pubkeys'] = pubkeys = list(pubkeys) txin['x_pubkeys'] = x_pubkeys = list(x_pubkeys) return pubkeys, x_pubkeys def update_signatures(self, signatures): """Add new signatures to a transaction `signatures` is expected to be a list of hex encoded sig strings with *no* sighash byte at the end (implicitly always 0x41 (SIGHASH_FORKID|SIGHASH_ALL); will be added by this function). signatures[i] is intended for self._inputs[i]. The signature will be matched with the appropriate pubkey automatically in the case of multisignature wallets. This function is used by the Trezor, KeepKey, etc to update the transaction with signatures form the device. Note this function supports both Schnorr and ECDSA signatures, but as yet no hardware wallets are signing Schnorr. """ if self.is_complete(): return if not isinstance(signatures, (tuple, list)): raise Exception('API changed: update_signatures expects a list.') if len(self.inputs()) != len(signatures): raise Exception('expected {} signatures; got {}'.format(len(self.inputs()), len(signatures))) for i, txin in enumerate(self.inputs()): pubkeys, x_pubkeys = self.get_sorted_pubkeys(txin) sig = signatures[i] if not isinstance(sig, str): raise ValueError("sig was bytes, expected string") # sig_final is the signature with the sighashbyte at the end (0x41) sig_final = sig if sig_final in txin.get('signatures'): # skip if we already have this signature continue pre_hash = Hash(bfh(self.serialize_preimage(i))) sig_bytes = bfh(sig) added = False reason = [] for j, pubkey in enumerate(pubkeys): # see which pubkey matches this sig (in non-multisig only 1 pubkey, in multisig may be multiple pubkeys) if self.verify_signature(bfh(pubkey), sig_bytes, pre_hash, reason): print_error("adding sig", i, j, pubkey, sig_final) self._inputs[i]['signatures'][j] = sig_final added = True if not added: resn = ', '.join(reversed(reason)) if reason else '' print_error("failed to add signature {} for any pubkey for reason(s): '{}' ; pubkey(s) / sig / pre_hash = ".format(i, resn), pubkeys, '/', sig, '/', bh2u(pre_hash)) # redo raw self.raw = self.serialize() def is_schnorr_signed(self, input_idx): ''' Return True IFF any of the signatures for a particular input are Schnorr signatures (Schnorr signatures are always 64 bytes + 1) ''' if (isinstance(self._inputs, (list, tuple)) and input_idx < len(self._inputs) and self._inputs[input_idx]): # Schnorr sigs are always 64 bytes. However the sig has a hash byte # at the end, so that's 65. 
Plus we are hex encoded, so 65*2=130 return any(isinstance(sig, (str, bytes)) and len(sig) == 130 for sig in self._inputs[input_idx].get('signatures', [])) return False def deserialize(self): if self.raw is None: return if self._inputs is not None: return d = deserialize(self.raw) self._inputs = d['inputs'] self._outputs = [(x['type'], x['address'], x['value']) for x in d['outputs']] assert all(isinstance(output[1], (PublicKey, Address, ScriptOutput)) for output in self._outputs) self.locktime = d['lockTime'] self.version = d['version'] return d @classmethod def from_io(klass, inputs, outputs, locktime=0, sign_schnorr=False): assert all(isinstance(output[1], (PublicKey, Address, ScriptOutput)) for output in outputs) self = klass(None) self._inputs = inputs self._outputs = outputs.copy() self.locktime = locktime self.set_sign_schnorr(sign_schnorr) return self @classmethod def pay_script(self, output_type, addr): if output_type == TYPE_SCRIPT: return addr elif output_type == TYPE_ADDRESS: return address_to_script(str(addr)) elif output_type == TYPE_PUBKEY: return public_key_to_p2pk_script(addr) else: raise TypeError('Unknown output type') @classmethod def estimate_pubkey_size_from_x_pubkey(cls, x_pubkey): try: if x_pubkey[0:2] in ['02', '03']: # compressed pubkey return 0x21 elif x_pubkey[0:2] == '04': # uncompressed pubkey return 0x41 elif x_pubkey[0:2] == 'ff': # bip32 extended pubkey return 0x21 elif x_pubkey[0:2] == 'fe': # old electrum extended pubkey return 0x41 except Exception as e: pass return 0x21 # just guess it is compressed @classmethod def estimate_pubkey_size_for_txin(cls, txin): pubkeys = txin.get('pubkeys', []) x_pubkeys = txin.get('x_pubkeys', []) if pubkeys and len(pubkeys) > 0: return cls.estimate_pubkey_size_from_x_pubkey(pubkeys[0]) elif x_pubkeys and len(x_pubkeys) > 0: return cls.estimate_pubkey_size_from_x_pubkey(x_pubkeys[0]) else: return 0x21 # just guess it is compressed @classmethod def get_siglist(self, txin, estimate_size=False, sign_schnorr=False): # if we have enough signatures, we use the actual pubkeys # otherwise, use extended pubkeys (with bip32 derivation) num_sig = txin.get('num_sig', 1) if estimate_size: pubkey_size = self.estimate_pubkey_size_for_txin(txin) pk_list = ["00" * pubkey_size] * len(txin.get('x_pubkeys', [None])) # we assume that signature will be 0x48 bytes long if ECDSA, 0x41 if Schnorr siglen = 0x48 sig_list = [ "00" * siglen ] * num_sig else: pubkeys, x_pubkeys = self.get_sorted_pubkeys(txin) x_signatures = txin['signatures'] signatures = list(filter(None, x_signatures)) is_complete = len(signatures) == num_sig if is_complete: pk_list = pubkeys sig_list = signatures else: pk_list = x_pubkeys sig_list = [sig if sig else NO_SIGNATURE for sig in x_signatures] return pk_list, sig_list @classmethod def input_script(self, txin, estimate_size=False, sign_schnorr=False): _type = txin['type'] if _type == 'coinbase': return txin['scriptSig'] pubkeys, sig_list = self.get_siglist(txin, estimate_size, sign_schnorr=False) print_error(_type) script = ''.join(push_script(x) for x in sig_list) if _type == 'p2pk': pass elif _type == 'p2sh': # put op_0 before script script = '00' + script redeem_script = multisig_script(pubkeys, txin['num_sig']) script += push_script(redeem_script) elif _type == 'p2pkh': script += push_script(pubkeys[0]) elif _type == 'unknown': return txin['scriptSig'] return script @classmethod def is_txin_complete(self, txin): num_sig = txin.get('num_sig', 1) x_signatures = txin['signatures'] signatures = list(filter(None, 
x_signatures)) return len(signatures) == num_sig @classmethod def get_preimage_script(self, txin): preimage_script = txin.get('preimage_script', None) if preimage_script is not None: return preimage_script pubkeys, x_pubkeys = self.get_sorted_pubkeys(txin) if txin['type'] in ['p2sh', 'p2wsh', 'p2wsh-p2sh']: return multisig_script(pubkeys, txin['num_sig']) elif txin['type'] in ['p2pkh', 'p2wpkh', 'p2wpkh-p2sh']: pubkey = pubkeys[0] pkh = bh2u(hash_160(bfh(pubkey))) return pubkeyhash_to_p2pkh_script(pkh) elif txin['type'] == 'p2pk': pubkey = pubkeys[0] return public_key_to_p2pk_script(pubkey) else: raise TypeError('Unknown txin type', txin['type']) @classmethod def serialize_outpoint(self, txin): return bh2u(bfh(txin['prevout_hash'])[::-1]) + int_to_hex(txin['prevout_n'], 4) @classmethod def serialize_input(self, txin, script, estimate_size=False): # Prev hash and index s = self.serialize_outpoint(txin) # Script length, script, sequence s += var_int(len(script)//2) s += script s += int_to_hex(txin.get('sequence', 0xffffffff - 1), 4) # offline signing needs to know the input value # if ('value' in txin # Legacy txs # and not (estimate_size or self.is_txin_complete(txin))): # s += int_to_hex(txin['value'], 8) return s def BIP_LI01_sort(self): # See https://github.com/kristovatlas/rfc/blob/master/bips/bip-li01.mediawiki self._inputs.sort(key = lambda i: (i['prevout_hash'], i['prevout_n'])) self._outputs.sort(key = lambda o: (o[2], self.pay_script(o[0], o[1]))) def serialize_output(self, output): output_type, addr, amount = output s = int_to_hex(amount, 8) script = self.pay_script(output_type, str(addr)) s += var_int(len(script)//2) s += script return s @classmethod def nHashType(cls): '''Hash type in hex.''' warnings.warn("warning: deprecated tx.nHashType()", FutureWarning, stacklevel=2) return 0x01 | (cls.SIGHASH_FORKID + (cls.FORKID << 8)) def serialize_preimage(self, i): nVersion = int_to_hex(self.version, 4) nHashType = int_to_hex(1, 4) nLocktime = int_to_hex(self.locktime, 4) inputs = self.inputs() outputs = self.outputs() txin = inputs[i] txins = var_int(len(inputs)) + ''.join(self.serialize_input(txin, self.get_preimage_script(txin) if i==k else '') for k, txin in enumerate(inputs)) txouts = var_int(len(outputs)) + ''.join(self.serialize_output(o) for o in outputs) nVersion = int_to_hex(self.version, 4) preimage = nVersion + txins + txouts + nLocktime + nHashType return preimage def serialize(self, estimate_size=False): nVersion = int_to_hex(self.version, 4) nLocktime = int_to_hex(self.locktime, 4) inputs = self.inputs() outputs = self.outputs() txins = var_int(len(inputs)) + ''.join(self.serialize_input(txin, self.input_script(txin, estimate_size, self._sign_schnorr), estimate_size) for txin in inputs) txouts = var_int(len(outputs)) + ''.join(self.serialize_output(o) for o in outputs) return nVersion + txins + txouts + nLocktime def hash(self): warnings.warn("warning: deprecated tx.hash()", FutureWarning, stacklevel=2) return self.txid() def txid(self): if not self.is_complete(): return None ser = self.serialize() return self._txid(ser) def txid_fast(self): ''' Returns the txid by immediately calculating it from self.raw, which is faster than calling txid() which does a full re-serialize each time. Note this should only be used for tx's that you KNOW are complete and that don't contain our funny serialization hacks. (The is_complete check is also not performed here because that potentially can lead to unwanted tx deserialization). 
''' if self.raw: return self._txid(self.raw) return self.txid() @staticmethod def _txid(raw_hex : str) -> str: return bh2u(Hash(bfh(raw_hex))[::-1]) def add_inputs(self, inputs): self._inputs.extend(inputs) self.raw = None def add_outputs(self, outputs): assert all(isinstance(output[1], (PublicKey, Address, ScriptOutput)) for output in outputs) self._outputs.extend(outputs) self.raw = None def input_value(self): return sum(x['value'] for x in (self.fetched_inputs() or self.inputs())) def output_value(self): return sum(val for tp, addr, val in self.outputs()) def get_fee(self): return self.input_value() - self.output_value() @profiler def estimated_size(self): '''Return an estimated tx size in bytes.''' return (len(self.serialize(True)) // 2 if not self.is_complete() or self.raw is None else len(self.raw) // 2) # ASCII hex string @classmethod def estimated_input_size(self, txin, sign_schnorr=False): '''Return an estimated of serialized input size in bytes.''' script = self.input_script(txin, True, sign_schnorr=False) return len(self.serialize_input(txin, script, True)) // 2 # ASCII hex string def signature_count(self): r = 0 s = 0 for txin in self.inputs(): if txin['type'] == 'coinbase': continue signatures = list(filter(None, txin.get('signatures',[]))) s += len(signatures) r += txin.get('num_sig',-1) return s, r def is_complete(self): s, r = self.signature_count() return r == s @staticmethod def verify_signature(pubkey, sig, msghash, reason=None): ''' Given a pubkey (bytes), signature (bytes -- without sighash byte), and a sha256d message digest, returns True iff the signature is good for the given public key, False otherwise. Does not raise normally unless given bad or garbage arguments. Optional arg 'reason' should be a list which will have a string pushed at the front (failure reason) on False return. ''' if (any(not arg or not isinstance(arg, bytes) for arg in (pubkey, sig, msghash)) or len(msghash) != 32): raise ValueError('bad arguments to verify_signature') if len(sig) == 64: # Schnorr signatures are always exactly 64 bytes return schnorr.verify(pubkey, sig, msghash) else: from ecdsa import BadSignatureError, BadDigestError from ecdsa.der import UnexpectedDER # ECDSA signature try: pubkey_point = ser_to_point(pubkey) vk = MyVerifyingKey.from_public_point(pubkey_point, curve=SECP256k1) if vk.verify_digest(sig, msghash, sigdecode = ecdsa.util.sigdecode_der): return True except (AssertionError, ValueError, TypeError, BadSignatureError, BadDigestError, UnexpectedDER) as e: # ser_to_point will fail if pubkey is off-curve, infinity, or garbage. 
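                # (Added note: at this point the signature is simply treated as
                #  invalid; the exception text is recorded in `reason` below so
                #  callers such as update_signatures() can report why.)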
# verify_digest may also raise BadDigestError and BadSignatureError if isinstance(reason, list): reason.insert(0, repr(e)) except BaseException as e: print_error("[Transaction.verify_signature] unexpected exception", repr(e)) if isinstance(reason, list): reason.insert(0, repr(e)) return False @staticmethod def _ecdsa_sign(sec, pre_hash): pkey = regenerate_key(sec) secexp = pkey.secret private_key = MySigningKey.from_secret_exponent(secexp, curve = SECP256k1) public_key = private_key.get_verifying_key() sig = private_key.sign_digest_deterministic(pre_hash, hashfunc=hashlib.sha256, sigencode = ecdsa.util.sigencode_der) assert public_key.verify_digest(sig, pre_hash, sigdecode = ecdsa.util.sigdecode_der) return sig @staticmethod def _schnorr_sign(pubkey, sec, pre_hash): pubkey = bytes.fromhex(pubkey) sig = schnorr.sign(sec, pre_hash) assert schnorr.verify(pubkey, sig, pre_hash) # verify what we just signed return sig def sign(self, keypairs): for i, txin in enumerate(self.inputs()): num = txin['num_sig'] pubkeys, x_pubkeys = self.get_sorted_pubkeys(txin) for j, x_pubkey in enumerate(x_pubkeys): signatures = list(filter(None, txin['signatures'])) if len(signatures) == num: # txin is complete break if x_pubkey in keypairs.keys(): print_error("adding signature for", x_pubkey, "use schnorr?", self._sign_schnorr) sec, compressed = keypairs.get(x_pubkey) pubkey = public_key_from_private_key(sec, compressed) # add signature pre_hash = Hash(bfh(self.serialize_preimage(i))) if self._sign_schnorr: sig = self._schnorr_sign(pubkey, sec, pre_hash) else: sig = self._ecdsa_sign(sec, pre_hash) txin['signatures'][j] = bh2u(sig) + '01' self._inputs[i] = txin print_error("is_complete", self.is_complete()) self.raw = self.serialize() def add_signature_to_txin(self, i, signingPos, sig): txin = self._inputs[i] txin['signatures'][signingPos] = sig txin['scriptSig'] = None # force re-serialization txin['witness'] = None # force re-serialization self.raw = None def sign_txin(self, txin_index, privkey_bytes) -> str: pre_hash = Hash(bfh(self.serialize_preimage(txin_index))) privkey = ecc.ECPrivkey(privkey_bytes) sig = privkey.sign_transaction(pre_hash) sig = bh2u(sig) + '01' return sig def get_outputs(self): """convert pubkeys to addresses""" o = [] for type, addr, v in self.outputs(): o.append((addr,v)) # consider using yield (addr, v) return o def get_output_addresses(self): return [addr for addr, val in self.get_outputs()] def has_address(self, addr): return (addr in self.get_output_addresses()) or (addr in (tx.get("address") for tx in self.inputs())) def is_final(self): return not any([x.get('sequence', 0xffffffff - 1) < 0xffffffff - 1 for x in self.inputs()]) def as_dict(self): if self.raw is None: self.raw = self.serialize() self.deserialize() out = { 'hex': self.raw, 'complete': self.is_complete(), 'final': self.is_final(), } return out # This cache stores foreign (non-wallet) tx's we fetched from the network # for the purposes of the "fetch_input_data" mechanism. Its max size has # been thoughtfully calibrated to provide a decent tradeoff between # memory consumption and UX. # # In even aggressive/pathological cases this cache won't ever exceed # 100MB even when full. [see ExpiringCache.size_bytes() to test it]. # This is acceptable considering this is Python + Qt and it eats memory # anyway.. and also this is 2019 ;). Note that all tx's in this cache # are in the non-deserialized state (hex encoded bytes only) as a memory # savings optimization. 
Please maintain that invariant if you modify this # code, otherwise the cache may grow to 10x memory consumption if you # put deserialized tx's in here. _fetched_tx_cache = ExpiringCache(maxlen=1000, name="TransactionFetchCache") def fetch_input_data(self, wallet, done_callback=None, done_args=tuple(), prog_callback=None, *, force=False, use_network=True): ''' Fetch all input data and put it in the 'ephemeral' dictionary, under 'fetched_inputs'. This call potentially initiates fetching of prevout_hash transactions from the network for all inputs to this tx. The fetched data is basically used for the Transaction dialog to be able to display fee, actual address, and amount (value) for tx inputs. `wallet` should ideally have a network object, but this function still will work and is still useful if it does not. `done_callback` is called with `done_args` (only if True was returned), upon completion. Note that done_callback won't be called if this function returns False. Also note that done_callback runs in a non-main thread context and as such, if you want to do GUI work from within it, use the appropriate Qt signal/slot mechanism to dispatch work to the GUI. `prog_callback`, if specified, is called periodically to indicate progress after inputs are retrieved, and it is passed a single arg, "percent" (eg: 5.1, 10.3, 26.3, 76.1, etc) to indicate percent progress. Note 1: Results (fetched transactions) are cached, so subsequent calls to this function for the same transaction are cheap. Note 2: Multiple, rapid calls to this function will cause the previous asynchronous fetch operation (if active) to be canceled and only the latest call will result in the invocation of the done_callback if/when it completes. ''' if not self._inputs: return False if force: # forced-run -- start with empty list inps = [] else: # may be a new list or list that was already in dict inps = self.fetched_inputs(require_complete = True) if len(self._inputs) == len(inps): # we already have results, don't do anything. return False eph = self.ephemeral eph['fetched_inputs'] = inps = inps.copy() # paranoia: in case another thread is running on this list # Lazy imports to keep this functionality very self-contained # These modules are always available so no need to globally import them. import threading import queue import time from copy import deepcopy from collections import defaultdict t0 = time.time() t = None cls = __class__ self_txid = self.txid() def doIt(): ''' This function is seemingly complex, but it's really conceptually simple: 1. Fetch all prevouts either from cache (wallet or global tx_cache) 2. Or, if they aren't in either cache, then we will asynchronously queue the raw tx gets to the network in parallel, across *all* our connected servers. This is very fast, and spreads the load around. 
Tested with a huge tx of 600+ inputs all coming from different prevout_hashes on mainnet, and it's super fast: cd8fcc8ad75267ff9ad314e770a66a9e871be7882b7c05a7e5271c46bfca98bc ''' last_prog = -9999.0 need_dl_txids = defaultdict(list) # the dict of txids we will need to download (wasn't in cache) def prog(i, prog_total=100): ''' notify interested code about progress ''' nonlocal last_prog if prog_callback: prog = ((i+1)*100.0)/prog_total if prog - last_prog > 5.0: prog_callback(prog) last_prog = prog while eph.get('_fetch') == t and len(inps) < len(self._inputs): i = len(inps) inp = deepcopy(self._inputs[i]) typ, prevout_hash, n, addr, value = inp.get('type'), inp.get('prevout_hash'), inp.get('prevout_n'), inp.get('address'), inp.get('value') if not prevout_hash or n is None: raise RuntimeError('Missing prevout_hash and/or prevout_n') if typ != 'coinbase' and (not isinstance(addr, Address) or value is None): tx = cls.tx_cache_get(prevout_hash) or wallet.transactions.get(prevout_hash) if tx: # Tx was in cache or wallet.transactions, proceed # note that the tx here should be in the "not # deserialized" state if tx.raw: # Note we deserialize a *copy* of the tx so as to # save memory. We do not want to deserialize the # cached tx because if we do so, the cache will # contain a deserialized tx which will take up # several times the memory when deserialized due to # Python's memory use being less efficient than the # binary-only raw bytes. So if you modify this code # do bear that in mind. tx = Transaction(tx.raw) try: tx.deserialize() # The below txid check is commented-out as # we trust wallet tx's and the network # tx's that fail this check are never # put in cache anyway. #txid = tx._txid(tx.raw) #if txid != prevout_hash: # sanity check # print_error("fetch_input_data: cached prevout_hash {} != tx.txid() {}, ignoring.".format(prevout_hash, txid)) except Exception as e: print_error("fetch_input_data: WARNING failed to deserialize {}: {}".format(prevout_hash, repr(e))) tx = None else: tx = None print_error("fetch_input_data: WARNING cached tx lacked any 'raw' bytes for {}".format(prevout_hash)) # now, examine the deserialized tx, if it's still good if tx: if n < len(tx.outputs()): outp = tx.outputs()[n] addr, value = outp[1], outp[2] inp['value'] = value inp['address'] = addr print_error("fetch_input_data: fetched cached", i, addr, value) else: print_error("fetch_input_data: ** FIXME ** should never happen -- n={} >= len(tx.outputs())={} for prevout {}".format(n, len(tx.outputs()), prevout_hash)) else: # tx was not in cache or wallet.transactions, mark # it for download below (this branch can also execute # in the unlikely case where there was an error above) need_dl_txids[prevout_hash].append((i, n)) # remember the input# as well as the prevout_n inps.append(inp) # append either cached result or as-yet-incomplete copy of _inputs[i] # Now, download the tx's we didn't find above if network is available # and caller said it's ok to go out ot network.. otherwise just return # what we have if use_network and eph.get('_fetch') == t and wallet.network: callback_funcs_to_cancel = set() try: # the whole point of this try block is the `finally` way below... 
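                    # Rough shape of what follows (annotation added for clarity):
                    #   1) queue one 'blockchain.transaction.get' per missing txid,
                    #   2) optionally queue one more request to learn this tx's own
                    #      block height from its confirmation count,
                    #   3) drain the reply queue, 10s timeout per item, updating inps,
                    #   4) `finally`: cancel any callbacks still registered.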
prog(-1) # tell interested code that progress is now 0% # Next, queue the transaction.get requests, spreading them # out randomly over the connected interfaces q = queue.Queue() q_ct = 0 bad_txids = set() def put_in_queue_and_cache(r): ''' we cache the results directly in the network callback as even if the user cancels the operation, we would like to save the returned tx in our cache, since we did the work to retrieve it anyway. ''' q.put(r) # put the result in the queue no matter what it is txid = '' try: # Below will raise if response was 'error' or # otherwise invalid. Note: for performance reasons # we don't validate the tx here or deserialize it as # this function runs in the network thread and we # don't want to eat up that thread's CPU time # needlessly. Also note the cache doesn't store # deserializd tx's so as to save memory. We # always deserialize a copy when reading the cache. tx = Transaction(r['result']) txid = r['params'][0] assert txid == cls._txid(tx.raw), "txid-is-sane-check" # protection against phony responses cls.tx_cache_put(tx=tx, txid=txid) # save tx to cache here except Exception as e: # response was not valid, ignore (don't cache) if txid: # txid may be '' if KeyError from r['result'] above bad_txids.add(txid) print_error("fetch_input_data: put_in_queue_and_cache fail for txid:", txid, repr(e)) for txid, l in need_dl_txids.items(): wallet.network.queue_request('blockchain.transaction.get', [txid], interface='random', callback=put_in_queue_and_cache) callback_funcs_to_cancel.add(put_in_queue_and_cache) q_ct += 1 def get_bh(): if eph.get('block_height'): return False lh = wallet.network.get_server_height() or wallet.get_local_height() def got_tx_info(r): q.put('block_height') # indicate to other thread we got the block_height reply from network try: confs = r.get('result').get('confirmations', 0) # will raise of error reply if confs and lh: # the whole point.. was to get this piece of data.. the block_height eph['block_height'] = bh = lh - confs + 1 print_error('fetch_input_data: got tx block height', bh) else: print_error('fetch_input_data: tx block height could not be determined') except Exception as e: print_error('fetch_input_data: get_bh fail:', str(e), r) if self_txid: wallet.network.queue_request('blockchain.transaction.get', [self_txid,True], interface=None, callback=got_tx_info) callback_funcs_to_cancel.add(got_tx_info) return True if get_bh(): q_ct += 1 class ErrorResp(Exception): pass for i in range(q_ct): # now, read the q back, with a 10 second timeout, and # populate the inputs try: r = q.get(timeout=10) if eph.get('_fetch') != t: # early abort from func, canceled break if r == 'block_height': # ignore block_height reply from network.. 
was already processed in other thread in got_tx_info above continue if r.get('error'): msg = r.get('error') if isinstance(msg, dict): msg = msg.get('message') or 'unknown error' raise ErrorResp(msg) rawhex = r['result'] txid = r['params'][0] assert txid not in bad_txids, "txid marked bad" # skip if was marked bad by our callback code tx = Transaction(rawhex); tx.deserialize() for item in need_dl_txids[txid]: ii, n = item assert n < len(tx.outputs()) outp = tx.outputs()[n] addr, value = outp[1], outp[2] inps[ii]['value'] = value inps[ii]['address'] = addr print_error("fetch_input_data: fetched from network", ii, addr, value) prog(i, q_ct) # tell interested code of progress except queue.Empty: print_error("fetch_input_data: timed out after 10.0s fetching from network, giving up.") break except Exception as e: print_error("fetch_input_data:", repr(e)) finally: # force-cancel any extant requests -- this is especially # crucial on error/timeout/failure. for func in callback_funcs_to_cancel: wallet.network.cancel_requests(func) if len(inps) == len(self._inputs) and eph.get('_fetch') == t: # sanity check eph.pop('_fetch', None) # potential race condition here, popping wrong t -- but in practice w/ CPython threading it won't matter print_error("fetch_input_data: elapsed {} sec".format(time.time()-t0)) if done_callback: done_callback(*done_args) # /doIt t = threading.Thread(target=doIt, daemon=True) eph['_fetch'] = t t.start() return True def fetched_inputs(self, *, require_complete=False): ''' Returns the complete list of asynchronously fetched inputs for this tx, if they exist. If the list is not yet fully retrieved, and require_complete == False, returns what it has so far (the returned list will always be exactly equal to len(self._inputs), with not-yet downloaded inputs coming from self._inputs and not necessarily containing a good 'address' or 'value'). If the download failed completely or was never started, will return the empty list []. Note that some inputs may still lack key: 'value' if there was a network error in retrieving them or if the download is still in progress.''' if self._inputs: ret = self.ephemeral.get('fetched_inputs') or [] diff = len(self._inputs) - len(ret) if diff > 0 and self.ephemeral.get('_fetch') and not require_complete: # in progress.. so return what we have so far return ret + self._inputs[len(ret):] elif diff == 0 and (not require_complete or not self.ephemeral.get('_fetch')): # finished *or* in-progress and require_complete==False return ret return [] def fetch_cancel(self) -> bool: ''' Cancels the currently-active running fetch operation, if any ''' return bool(self.ephemeral.pop('_fetch', None)) @classmethod def tx_cache_get(cls, txid : str) -> object: ''' Attempts to retrieve txid from the tx cache that this class keeps in-memory. Returns None on failure. The returned tx is not deserialized, and is a copy of the one in the cache. ''' tx = cls._fetched_tx_cache.get(txid) if tx is not None and tx.raw: # make sure to return a copy of the transaction from the cache # so that if caller does .deserialize(), *his* instance will # use up 10x memory consumption, and not the cached instance which # should just be an undeserialized raw tx. return Transaction(tx.raw) return None @classmethod def tx_cache_put(cls, tx : object, txid : str = None): ''' Puts a non-deserialized copy of tx into the tx_cache. 
        '''
        if not tx or not tx.raw:
            raise ValueError('Please pass a tx which has a valid .raw attribute!')
        txid = txid or cls._txid(tx.raw)  # optionally, caller can pass-in txid to save CPU time for hashing
        cls._fetched_tx_cache.put(txid, Transaction(tx.raw))


def tx_from_str(txt):
    '''Accepts either JSON (an object with a "hex" key) or raw hexadecimal
    text, and returns the raw hexadecimal string.'''
    import json
    txt = txt.strip()
    if not txt:
        raise ValueError("empty string")
    try:
        bfh(txt)
        is_hex = True
    except Exception:  # not valid hex -- fall through and try JSON
        is_hex = False
    if is_hex:
        return txt
    tx_dict = json.loads(str(txt))
    assert "hex" in tx_dict
    return tx_dict["hex"]
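# ---------------------------------------------------------------------------
# Editor's usage sketch (not part of the original module): how the helpers
# above are meant to compose. `user_text` and `some_txid` are hypothetical.
#
#   raw = tx_from_str(user_text)               # raw hex in, raw hex out;
#                                              # JSON {"hex": ...} also accepted
#   tx = Transaction(raw)
#   Transaction.tx_cache_put(tx)               # txid is hashed if not supplied
#   copy = Transaction.tx_cache_get(some_txid)
#   if copy is not None:
#       copy.deserialize()                     # safe: the cache keeps only the
#                                              # raw, undeserialized instance
# ---------------------------------------------------------------------------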
wsocket.py
#!/usr/bin/python # -*- coding: utf-8 -*- """ WSocket is a Simple WSGI Websocket Server, Framework, Middleware And App. It also offers a basic WSGI framework with routes handler, a built-in HTTP Server and event based websocket application. all in a single file and with no dependencies other than the Python Standard Library. Homepage and documentation: https://wsocket.gitbook.io Copyright (c) 2020, Kavindu Santhusa. License: MIT """ # Imports from __future__ import absolute_import, division, print_function from base64 import b64decode, b64encode from hashlib import sha1 from sys import version_info, exc_info from os import urandom from threading import Thread from time import sleep import traceback import logging import zlib import struct import socket from socket import error as socket_error from wsgiref.simple_server import make_server, ServerHandler, WSGIRequestHandler, WSGIServer try: # Py3 from socketserver import ThreadingMixIn from urllib.parse import urlencode except ImportError: # Py2 from SocketServer import ThreadingMixIn from urllib import urlencode try: import ssl except ImportError as e: ssl_err = e class ssl(): def __getattr__(self, name): raise ssl_err __author__ = "Kavindu Santhusa" __version__ = "2.1.1" __license__ = "MIT" __status__ = 4 # see setup.py logger = logging.getLogger(__name__) logging.basicConfig() # python compatability PY3 = version_info[0] >= 3 if PY3: import http.client as httplib from urllib.parse import urlparse text_type = str string_types = (str, ) range_type = range else: import httplib from urlparse import urlparse bytes = str text_type = unicode string_types = basestring range_type = xrange # websocket OPCODES OPCODE_CONTINUATION = 0x00 OPCODE_TEXT = 0x01 OPCODE_BINARY = 0x02 OPCODE_CLOSE = 0x08 OPCODE_PING = 0x09 OPCODE_PONG = 0x0A FIN_MASK = 0x80 OPCODE_MASK = 0x0F MASK_MASK = 0x80 LENGTH_MASK = 0x7F RSV0_MASK = 0x40 RSV1_MASK = 0x20 RSV2_MASK = 0x10 HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK # default messages MSG_SOCKET_DEAD = "Socket is dead" MSG_ALREADY_CLOSED = "Connection is already closed" MSG_CLOSED = "Connection closed" # from bottlepy/bottle #: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') HTTP_CODES = httplib.responses.copy() HTTP_CODES[418] = "I'm a teapot" # RFC 2324 HTTP_CODES[428] = "Precondition Required" HTTP_CODES[429] = "Too Many Requests" HTTP_CODES[431] = "Request Header Fields Too Large" HTTP_CODES[451] = "Unavailable For Legal Reasons" # RFC 7725 HTTP_CODES[511] = "Network Authentication Required" _HTTP_STATUS_LINES = dict( (k, "%d %s" % (k, v)) for (k, v) in HTTP_CODES.items()) def log_traceback(ex): """generates error log from Exception object.""" if PY3: ex_traceback = ex.__traceback__ else: _, _, ex_traceback = exc_info() tb_lines = '' for line in traceback.format_exception(ex.__class__, ex, ex_traceback): tb_lines += str(line) return tb_lines class WebSocketError(socket_error): """ Base class for all websocket errors. """ pass class ProtocolError(WebSocketError): """ Raised if an error occurs when de/encoding the websocket protocol. """ pass class FrameTooLargeException(ProtocolError): """ Raised if a frame is received that is too large. """ pass class ThreadingWSGIServer(ThreadingMixIn, WSGIServer): """This class is identical to WSGIServer but uses threads to handle requests by using the ThreadingMixIn. This is useful to handle web browsers pre-opening sockets, on which Server would wait indefinitely. 
""" multithread = True daemon_threads = True class FixedServerHandler(ServerHandler): # fixed serverhandler http_version = "1.1" # http versions below 1.1 is not supported by some clients such as Firefox def _convert_string_type(self, value, title): # not in old versions of wsgiref """Convert/check value type.""" if isinstance(value, string_types): return value raise AssertionError("{0} must be of type str (got {1})".format( title, repr(value))) def start_response(self, status, headers, exc_info=None): """'start_response()' callable as specified by PEP 3333""" if exc_info: try: if self.headers_sent: # Re-raise original exception if headers sent raise exc_info[0](exc_info[1]).with_traceback(exc_info[2]) finally: exc_info = None # avoid dangling circular ref elif self.headers is not None: raise AssertionError("Headers already set!") self.status = status self.headers = self.headers_class(headers) status = self._convert_string_type(status, "Status") assert len(status) >= 4, "Status must be at least 4 characters" assert status[:3].isdigit(), "Status message must begin w/3-digit code" assert status[3] == " ", "Status message must have a space after code" if __debug__: for name, val in headers: name = self._convert_string_type(name, "Header name") val = self._convert_string_type(val, "Header value") # removed hop by hop headers check otherwise it raises AssertionError for Upgrade and Connection headers # assert not is_hop_by_hop( # name # ), "Hop-by-hop header, '{}: {}', not allowed".format(name, val) self.send_headers() return self.write class FixedHandler(WSGIRequestHandler): # fixed request handler def address_string(self): # Prevent reverse DNS lookups please. return self.client_address[0] def log_request(self, *args, **kw): if not self.quiet: return WSGIRequestHandler.log_request(self, *args, **kw) def get_app(self): return self.server.get_app() def handle(self ): # to add FixedServerHandler we had to override entire method """Handle a single HTTP request""" self.raw_requestline = self.rfile.readline(65537) if len(self.raw_requestline) > 65536: self.requestline = "" self.request_version = "" self.command = "" self.send_error(414) return if not self.parse_request(): # An error code has been sent, just exit return handler = FixedServerHandler(self.rfile, self.wfile, self.get_stderr(), self.get_environ()) handler.request_handler = self # backpointer for logging handler.run(self.get_app()) class WebSocket(object): """ Base class for supporting websocket operations. 
""" origin = None protocol = None version = None path = None logger = logger def __init__(self, environ, read, write, handler, do_compress): self.environ = environ self.closed = False self.write = write self.read = read self.handler = handler self.do_compress = do_compress self.origin = self.environ.get( "HTTP_SEC_WEBSOCKET_ORIGIN") or self.environ.get("HTTP_ORIGIN") self.protocols = list( map(str.strip, self.environ.get("HTTP_SEC_WEBSOCKET_PROTOCOL", "").split(","))) self.version = int( self.environ.get("HTTP_SEC_WEBSOCKET_VERSION", "0").strip()) self.path = self.environ.get("PATH_INFO", "/") if do_compress: self.compressor = zlib.compressobj(7, zlib.DEFLATED, -zlib.MAX_WBITS) self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) def __del__(self): try: self.close() except: # close() may fail if __init__ didn't complete pass def _decode_bytes(self, bytestring): if not bytestring: return "" try: return bytestring.decode("utf-8") except UnicodeDecodeError as e: print('UnicodeDecodeError') self.close(1007, str(e)) raise def _encode_bytes(self, text): if not isinstance(text, str): text = text_type(text or "") return text.encode("utf-8") def _is_valid_close_code(self, code): # valid hybi close code? if (code < 1000 or 1004 <= code <= 1006 or 1012 <= code <= 1016 or code == 1100 # not sure about this one but the autobahn fuzzer requires it. or 2000 <= code <= 2999): return False return True def handle_close(self, payload): if not payload: self.close(1000, "") return if len(payload) < 2: raise ProtocolError("Invalid close frame: %s" % payload) code = struct.unpack("!H", payload[:2])[0] payload = payload[2:] if payload: payload.decode("utf-8") if not self._is_valid_close_code(code): raise ProtocolError("Invalid close code %s" % code) self.close(code, payload) def handle_ping(self, payload): self.send_frame(payload, self.OPCODE_PONG) def handle_pong(self, payload): pass def mask_payload(self, mask, length, payload): payload = bytearray(payload) mask = bytearray(mask) for i in range_type(length): payload[i] ^= mask[i % 4] return payload def read_message(self): opcode = None message = bytearray() while True: data = self.read(2) if len(data) != 2: first_byte, second_byte = 0, 0 else: first_byte, second_byte = struct.unpack("!BB", data) fin = first_byte & FIN_MASK f_opcode = first_byte & OPCODE_MASK flags = first_byte & HEADER_FLAG_MASK length = second_byte & LENGTH_MASK has_mask = second_byte & MASK_MASK == MASK_MASK if f_opcode > 0x07: if not fin: raise ProtocolError( "Received fragmented control frame: {0!r}".format( data)) # Control frames MUST have a payload length of 125 bytes or less if length > 125: raise FrameTooLargeException( "Control frame cannot be larger than 125 bytes: " "{0!r}".format(data)) if length == 126: # 16 bit length data = self.read(2) if len(data) != 2: raise WebSocketError( "Unexpected EOF while decoding header") length = struct.unpack("!H", data)[0] elif length == 127: # 64 bit length data = self.read(8) if len(data) != 8: raise WebSocketError( "Unexpected EOF while decoding header") length = struct.unpack("!Q", data)[0] if has_mask: mask = self.read(4) if len(mask) != 4: raise WebSocketError( "Unexpected EOF while decoding header") if self.do_compress and (flags & RSV0_MASK): flags &= ~RSV0_MASK compressed = True else: compressed = False if flags: raise ProtocolError(str(flags)) if not length: payload = b"" else: try: payload = self.read(length) except socket.error: payload = b"" except Exception: raise WebSocketError("Could not read payload") if len(payload) != length: 
                    raise WebSocketError(
                        "Unexpected EOF reading frame payload")
            if has_mask:
                payload = self.mask_payload(mask, length, payload)
            if compressed:
                payload = b"".join((
                    self.decompressor.decompress(bytes(payload)),
                    self.decompressor.decompress(b"\0\0\xff\xff"),
                    self.decompressor.flush(),
                ))
            if f_opcode in (OPCODE_TEXT, OPCODE_BINARY):
                # a new frame
                if opcode:
                    raise ProtocolError("The opcode in non-fin frame is "
                                        "expected to be zero, got "
                                        "{0!r}".format(f_opcode))
                opcode = f_opcode
            elif f_opcode == OPCODE_CONTINUATION:
                if not opcode:
                    raise ProtocolError("Unexpected frame with opcode=0")
            elif f_opcode == OPCODE_PING:
                self.handle_ping(payload)
                continue
            elif f_opcode == OPCODE_PONG:
                self.handle_pong(payload)
                continue
            elif f_opcode == OPCODE_CLOSE:
                self.logger.debug('opcode close')
                self.handle_close(payload)
                return
            else:
                raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode))
            if opcode == OPCODE_TEXT:
                payload.decode("utf-8")  # validate each text fragment
            message += payload
            if fin:
                break
        if opcode == OPCODE_TEXT:
            return self._decode_bytes(message)
        else:
            return message

    def receive(self):
        """
        Read and return a message from the stream. If `None` is returned, then
        the socket is considered closed/errored.
        """
        if self.closed:
            self.logger.debug('receive on closed connection')
            self.handler.on_close(MSG_ALREADY_CLOSED)
            raise WebSocketError(MSG_ALREADY_CLOSED)
        try:
            return self.read_message()
        except UnicodeError as e:
            self.logger.debug('UnicodeDecodeError')
            self.close(1007, str(e).encode())
        except ProtocolError as e:
            self.logger.debug('protocol error: %s', e)
            self.close(1002, str(e).encode())
        except socket.timeout as e:
            self.logger.debug('timeout')
            self.close(message=str(e))
            self.handler.on_close(MSG_CLOSED)
        except socket.error as e:
            self.logger.debug('socket error')
            self.close(message=str(e))
            self.handler.on_close(MSG_CLOSED)
        return None

    def encode_header(self, fin, opcode, mask, length, flags):
        first_byte = opcode
        second_byte = 0
        extra = b""
        result = bytearray()
        if fin:
            first_byte |= FIN_MASK
        if flags & RSV0_MASK:
            first_byte |= RSV0_MASK
        if flags & RSV1_MASK:
            first_byte |= RSV1_MASK
        if flags & RSV2_MASK:
            first_byte |= RSV2_MASK
        if length < 126:
            second_byte += length
        elif length <= 0xFFFF:
            second_byte += 126
            extra = struct.pack("!H", length)
        elif length <= 0xFFFFFFFFFFFFFFFF:
            second_byte += 127
            extra = struct.pack("!Q", length)
        else:
            raise FrameTooLargeException
        if mask:
            second_byte |= MASK_MASK
        result.append(first_byte)
        result.append(second_byte)
        result.extend(extra)
        if mask:
            result.extend(mask)
        return result

    def send_frame(self, message, opcode, do_compress=False):
        if self.closed:
            self.logger.debug('send on closed connection')
            self.handler.on_close(MSG_ALREADY_CLOSED)
            raise WebSocketError(MSG_ALREADY_CLOSED)
        if not message:
            return
        if opcode in (OPCODE_TEXT, OPCODE_PING):
            message = self._encode_bytes(message)
        elif opcode == OPCODE_BINARY:
            message = bytes(message)
        if do_compress and self.do_compress:
            message = self.compressor.compress(message)
            message += self.compressor.flush(zlib.Z_SYNC_FLUSH)
            if message.endswith(b"\x00\x00\xff\xff"):
                message = message[:-4]
            flags = RSV0_MASK
        else:
            flags = 0
        header = self.encode_header(True, opcode, b"", len(message), flags)
        try:
            self.write(bytes(header + message))
        except socket.error as e:
            raise WebSocketError(MSG_SOCKET_DEAD + " : " + str(e))

    def send(self, message, binary=None, do_compress=True):
        """
        Send a frame over the websocket with message as its payload
        """
        if binary is None:
            binary = not isinstance(message, string_types)
        opcode = OPCODE_BINARY if binary else OPCODE_TEXT
        try:
            self.send_frame(message, opcode, do_compress)
        except WebSocketError:
            self.handler.on_close(MSG_SOCKET_DEAD)
            raise
WebSocketError(MSG_SOCKET_DEAD) def close(self, code=1000, message=b""): """ Close the websocket and connection, sending the specified code and message. The underlying socket object is _not_ closed, that is the responsibility of the initiator. """ print("close called") if self.closed: print('receive closed') self.handler.on_close(MSG_ALREADY_CLOSED) try: message = self._encode_bytes(message) self.send_frame(struct.pack("!H%ds" % len(message), code, message), opcode=OPCODE_CLOSE) except WebSocketError: self.logger.debug( "Failed to write closing frame -> closing socket") finally: self.logger.debug("Closed WebSocket") self.closed = True self.write = None self.read = None self.environ = None class Response(object): # Header blacklist for specific response codes # (rfc2616 section 10.2.3 and 10.3.5) bad_headers = { 204: frozenset(("Content-Type", "Content-Length")), 304: frozenset(( "Allow", "Content-Encoding", "Content-Language", "Content-Length", "Content-Range", "Content-Type", "Content-Md5", "Last-Modified", )), } headers_sent = False def __init__(self, environ, start_response, app): self.environ = environ self._start_response = start_response self.app = app def process_response(self, allow_write=True): try: results = self.app(self.environ, self.start_response) except Exception as e: self.start_response() log = log_traceback(e) err = "<h1>Internal Server Error(500)</h1><p><b>%s :%s</b></p><p><samp><pre>%s</pre></samp></p><a href=\"https://github.com/Ksengine/wsocket/issues/new?%s\" target=\"blank\"><button><h3>report</h3></button></a>" % ( type(e).__name__, str(e), log, urlencode({ 'title': type(e).__name__, 'body': '```python\n' + log + '\n```' })) return [err.encode("utf-8")] if not allow_write: return [] if isinstance(results, string_types): return [results.encode("utf-8")] elif isinstance(results, bytes): return [results] elif hasattr(results, "__iter__"): while not self.headers_sent: pass for result in results: if isinstance(result, string_types): self.write(result.encode("utf-8")) elif isinstance(result, bytes): self.write(result) else: self.write(str(result).encode("utf-8")) return [] else: return [str(result).encode("utf-8")] def start_response(self, status="200 OK", headers=[]): if self.headers_sent: return status = self.process_status(status) if isinstance(headers, dict): headers = list(headers.items()) if self.code in self.bad_headers: bad_headers = self.bad_headers[self.code] headers = [h for h in headers if h[0] not in bad_headers] self.write = self._start_response(status, headers) self.headers_sent = True return self.write def process_status(self, status): if isinstance(status, int): code, status = status, _HTTP_STATUS_LINES.get(status) elif " " in status: if "\n" in status or "\r" in status or "\0" in status: raise ValueError("Status line must not include control chars.") status = status.strip() code = int(status.split()[0]) else: raise ValueError("String status line without a reason phrase.") if not 100 <= code <= 999: raise ValueError("Status code out of range.") self.code = code return str(status or ("%d Unknown" % code)) class Event: def __init__(self, default=None): self._items = [] self.default = default def __call__(self, *args, **kwargs): def execute(): for func in self._items: try: func(*args, **kwargs) except Exception as e: logger.exception(e) if not len(self._items): if self.default: t = Thread(target=self.default, args=args, kwargs=kwargs) t.start() return else: return t = Thread(target=execute) t.start() def clear(self): self._items = [] def __add__(self, 
item): self._items.append(item) return self def __sub__(self, item): self._items.remove(item) return self def __iadd__(self, item): self._items.append(item) return self def __isub__(self, item): self._items.remove(item) return self class WSocketApp: SUPPORTED_VERSIONS = ("13", "8", "7") GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" websocket_class = WebSocket send = None routes = {} def __init__(self, app=None, protocols=[]): self.protocols = protocols if isinstance(protocols, (list, tuple, set)) else [protocols] self.app = app or self.wsgi self.onclose = Event(self.on_close) self.onmessage = Event(self.on_message) self.onconnect = Event(self.on_connect) def on_close(self, message): print(message) def on_connect(self, client): print(client) client.send('you connected') def fake(*args, **kwargs): pass def on_message(self, message, client): print(repr(message)) try: client.send("you said: " + message) sleep(2) client.send("you said: " + message) except WebSocketError: pass def route(self, r): def decorator(callback): self.routes[r] = callback return callback return decorator def not_found(self, environ, start_response): start_response(404) return "<h1>Page Not Found(404)</h1><p><b>%s</b></p>" % ( environ.get("PATH_INFO") + "?" + environ.get("QUERY_STRING", "\b")) def wsgi(self, environ, start_response): if len(self.routes): for route in self.routes: if route == environ.get("PATH_INFO"): r = Response(environ, start_response, self.routes[route]) return r.process_response() if route.endswith("*") and environ.get( "PATH_INFO", "").startswith(route[:-1]): r = Response(environ, start_response, self.routes[route]) return r.process_response() r = Response(environ, start_response, self.not_found) return r.process_response() wsock = environ.get("wsgi.websocket") if not wsock: start_response() return "<h1>Hello World!</h1>" self.onconnect(wsock) while True: try: message = wsock.receive() if message != None: self.onmessage(message, wsock) except WebSocketError as e: break return [] def __call__(self, environ, start_response): if "wsgi.websocket" in environ or environ.get("REQUEST_METHOD", "") != "GET": r = Response(environ, start_response, self.app) return r.process_response() # Upgrade # Connection if "websocket" not in map( str.strip, environ.get("HTTP_UPGRADE", "").lower().split(",")) or "upgrade" not in map( str.strip, environ.get("HTTP_CONNECTION", "").lower().split(",")): r = Response(environ, start_response, self.app) return r.process_response() # Sec-WebSocket-Version PLUS determine mode: Hybi or Hixie if "HTTP_SEC_WEBSOCKET_VERSION" not in environ: logger.warning( "WebSocket connection denied - Hixie76 protocol not supported." 
            )
            start_response(
                "426 Upgrade Required",
                [("Sec-WebSocket-Version",
                  ", ".join(self.SUPPORTED_VERSIONS))],
            )
            return [b"No Websocket protocol version defined"]

        version = environ.get("HTTP_SEC_WEBSOCKET_VERSION")
        # respond with the list of supported versions (descending order)
        if version not in self.SUPPORTED_VERSIONS:
            msg = "Unsupported WebSocket Version: %s" % version
            logger.warning(msg)
            start_response(
                "400 Bad Request",
                [("Sec-WebSocket-Version",
                  ", ".join(self.SUPPORTED_VERSIONS))],
            )
            return [msg.encode()]

        key = environ.get("HTTP_SEC_WEBSOCKET_KEY", "").strip()
        if not len(key):
            msg = "Sec-WebSocket-Key header is missing/empty"
            logger.warning(msg)
            start_response("400 Bad Request", [])
            return [msg.encode()]
        try:
            key_len = len(b64decode(key))
        except TypeError:
            msg = "Invalid key: %s" % key
            logger.warning(msg)
            start_response("400 Bad Request", [])
            return [msg.encode()]
        if key_len != 16:
            msg = "Invalid key: %s" % key
            logger.warning(msg)
            start_response("400 Bad Request", [])
            return [msg.encode()]  # fixed: was `msg.encode` (a method, not bytes)

        # Sec-WebSocket-Protocol
        requested_protocols = list(
            map(str.strip,
                environ.get("HTTP_SEC_WEBSOCKET_PROTOCOL", "").split(",")))
        # negotiate the protocols both sides support
        # (fixed: was `and`, which just returned the server's whole list)
        protocols = set(requested_protocols) & set(self.protocols)
        logger.debug("Protocols allowed: {0}".format(", ".join(protocols)))

        extensions = list(
            map(lambda ext: ext.split(";")[0].strip(),
                environ.get("HTTP_SEC_WEBSOCKET_EXTENSIONS", "").split(",")))
        do_compress = "permessage-deflate" in extensions

        if PY3:
            accept = b64encode(
                sha1((key + self.GUID).encode("latin-1")).digest()).decode("latin-1")
        else:
            accept = b64encode(sha1(key + self.GUID).digest())
        headers = [
            ("Upgrade", "websocket"),
            ("Connection", "Upgrade"),
            ("Sec-WebSocket-Accept", accept),
        ]
        if do_compress:
            headers.append(("Sec-WebSocket-Extensions", "permessage-deflate"))
        if protocols:
            headers.append(("Sec-WebSocket-Protocol", ", ".join(protocols)))
        logger.debug("WebSocket request accepted, switching protocols")
        write = start_response("101 Switching Protocols", headers)
        read = environ["wsgi.input"].read
        write(b"")
        websocket = self.websocket_class(environ, read, write, self,
                                         do_compress)
        environ.update({
            "wsgi.websocket_version": version,
            "wsgi.websocket": websocket
        })
        r = Response(environ, start_response, self.app)
        r.start_response = self.fake
        return r.process_response(False)


# for version compat
class WebSocketHandler(FixedHandler):
    def get_app(self):
        return WSocketApp(self.server.get_app())


WSocketHandler = WebSocketHandler


class WSocketServer(ThreadingWSGIServer):
    def set_app(self, app, *args, **kwargs):
        ThreadingWSGIServer.set_app(self, WSocketApp(app), *args, **kwargs)


def run(app=WSocketApp(), host="127.0.0.1", port=8080, **options):
    handler_cls = options.get("handler_class", FixedHandler)
    server_cls = options.get("server_class", ThreadingWSGIServer)
    if ":" in host:  # Fix wsgiref for IPv6 addresses.
        if getattr(server_cls, "address_family") == socket.AF_INET:
            class server_cls(server_cls):
                address_family = socket.AF_INET6
    srv = make_server(host, port, app, server_cls, handler_cls)
    port = srv.server_port  # update to the actual port (0 means a random port)
    print("Server started at http://%s:%i." % (host, port))
    try:
        srv.serve_forever()
    except KeyboardInterrupt:
        print("\nServer stopped.")
    srv.server_close()  # Prevent ResourceWarning: unclosed socket


if __name__ == "__main__":
    run()
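# ---------------------------------------------------------------------------
# Editor's usage sketch (not shipped with the module): hook a custom message
# handler onto WSocketApp's Event objects and serve it with the bundled run()
# helper. The handler name `echo` is ours.
#
#   app = WSocketApp()
#
#   def echo(message, client):
#       client.send("echo: " + message)   # WebSocket.send() picks the opcode
#
#   app.onmessage += echo                 # Event supports += / -= for handlers
#   run(app, host="127.0.0.1", port=8080)
# ---------------------------------------------------------------------------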
server.py
import socket
from threading import Thread

conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.bind(('127.0.0.1', 5000))
conn.listen(10)

clients = []


def send(message, sender):
    """Broadcast a message to every connected client except the sender."""
    for client in clients[:]:  # iterate over a copy: we may remove dead clients
        if sender != client:
            try:
                client.send(message)
            except OSError:
                clients.remove(client)


def listen(client):
    """Relay messages from one client to the others until it disconnects."""
    while True:
        try:
            message = client.recv(1024)
        except OSError:
            message = b''
        if not message:  # empty bytes: the peer closed the connection
            if client in clients:
                clients.remove(client)
            client.close()
            break
        send(message, client)


while True:
    new_client, _ = conn.accept()
    if new_client not in clients:
        clients.append(new_client)
        Thread(target=listen, args=[new_client]).start()
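# ---------------------------------------------------------------------------
# A matching client sketch (ours, not part of this script): one thread prints
# whatever the relay broadcasts while the main thread sends typed lines.
#
#   import socket
#   from threading import Thread
#
#   sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   sock.connect(('127.0.0.1', 5000))
#
#   def reader():
#       while True:
#           data = sock.recv(1024)
#           if not data:          # server closed the connection
#               break
#           print(data.decode('utf-8', 'replace'))
#
#   Thread(target=reader, daemon=True).start()
#   while True:
#       sock.sendall(input().encode('utf-8'))
# ---------------------------------------------------------------------------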
Lauren.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import time from src import InstaBot from src.check_status import check_status from src.feed_scanner import feed_scanner from src.follow_protocol import follow_protocol from src.unfollow_protocol import unfollow_protocol from multiprocessing import Process from multiprocessing import Pool def the_bot(id, passw): bot = InstaBot( login=id, password=passw, like_per_day=1920, comments_per_day=0, tag_list=['lifestyle', 'love', 'instagood', 'picoftheday', 'photograph', '#all_shots', 'floridastateuniversity', 'floirda', 'college', 'floridastateuniversity', 'fsu', 'fashion', 'fashionlover', 'stylish', 'bloggerstyle', 'noles', 'nyc', 'model', 'girlsofinstagram', 'fashiongram'], tag_blacklist=['rain', 'thunderstorm'], user_blacklist={}, max_like_for_one_tag=220, follow_per_day=0, follow_time=1 * 5, unfollow_per_day=0, unfollow_break_min=15, unfollow_break_max=30, log_mod=0, proxy='', # List of list of words, each of which will be used to generate comment # For example: "This shot feels wow!" comment_list=[["this", "the", "your"], ["photo", "picture", "pic", "shot", "snapshot"], ["is", "looks", "feels", "is really"], ["great", "super", "good", "very good", "good", "wow", "WOW", "cool", "GREAT","magnificent", "magical", "very cool", "stylish", "beautiful", "so beautiful", "so stylish", "so professional", "lovely", "so lovely", "very lovely", "glorious","so glorious", "very glorious", "adorable", "excellent", "amazing"], [".", "..", "...", "!", "!!", "!!!"]], # Use unwanted_username_list to block usernames containing a string ## Will do partial matches; i.e. 'mozart' will block 'legend_mozart' ### 'free_followers' will be blocked because it contains 'free' unwanted_username_list=[ 'second', 'stuff', 'art', 'project', 'love', 'life', 'food', 'blog', 'free', 'keren', 'photo', 'graphy', 'indo', 'travel', 'art', 'shop', 'store', 'sex', 'toko', 'jual', 'online', 'murah', 'jam', 'kaos', 'case', 'baju', 'fashion', 'corp', 'tas', 'butik', 'grosir', 'karpet', 'sosis', 'salon', 'skin', 'care', 'cloth', 'tech', 'rental', 'kamera', 'beauty', 'express', 'kredit', 'collection', 'impor', 'preloved', 'follow', 'follower', 'gain', '.id', '_id', 'bags' ], unfollow_whitelist=['example_user_1', 'example_user_2']) while True: #print("# MODE 0 = ORIGINAL MODE BY LEVPASHA") #print("## MODE 1 = MODIFIED MODE BY KEMONG") #print("### MODE 2 = ORIGINAL MODE + UNFOLLOW WHO DON'T FOLLOW BACK") #print("#### MODE 3 = MODIFIED MODE : UNFOLLOW USERS WHO DON'T FOLLOW YOU BASED ON RECENT FEED") #print("##### MODE 4 = MODIFIED MODE : FOLLOW USERS BASED ON RECENT FEED ONLY") #print("###### MODE 5 = MODIFIED MODE : JUST UNFOLLOW EVERYBODY, EITHER YOUR FOLLOWER OR NOT") ################################ ## WARNING ### ################################ # DON'T USE MODE 5 FOR A LONG PERIOD. 
        # YOU RISK GETTING YOUR ACCOUNT BANNED.
        ## USE MODE 5 IN BURSTS: USE IT TO UNFOLLOW AS MANY PEOPLE AS YOU WANT IN A SHORT TIME PERIOD
        mode = 0
        #print("You chose mode : %i" % (mode))
        #print("CTRL + C to cancel this operation or wait 30 seconds to start")
        #time.sleep(30)
        if mode == 0:
            bot.new_auto_mod()
        elif mode == 1:
            check_status(bot)
            while bot.self_following - bot.self_follower > 200:
                unfollow_protocol(bot)
                time.sleep(10 * 60)
                check_status(bot)
            while bot.self_following - bot.self_follower < 400:
                while len(bot.user_info_list) < 50:
                    feed_scanner(bot)
                    time.sleep(5 * 60)
                follow_protocol(bot)
                time.sleep(10 * 60)
                check_status(bot)
        elif mode == 2:
            bot.bot_mode = 1
            bot.new_auto_mod()
        elif mode == 3:
            unfollow_protocol(bot)
            time.sleep(10 * 60)
        elif mode == 4:
            feed_scanner(bot)
            time.sleep(60)
            follow_protocol(bot)
            time.sleep(10 * 60)
        elif mode == 5:
            bot.bot_mode = 2
            unfollow_protocol(bot)
        else:
            print("Wrong mode!")


p = Pool()
p.starmap(the_bot, [("Laurenschwec", "testtest23")])
#p = Process(target=the_bot, args=["LaurenSchwec", "testtest23"])
#p.start()
#p.join()
#pool.apply_async(the_bot)
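# ---------------------------------------------------------------------------
# Editor's sketch: the same Pool.starmap call used above scales to several
# accounts, one worker process per (login, password) pair. The credentials
# below are placeholders.
#
#   accounts = [("account_one", "password_one"),
#               ("account_two", "password_two")]
#   with Pool(processes=len(accounts)) as pool:
#       pool.starmap(the_bot, accounts)
# ---------------------------------------------------------------------------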
engine.py
# encoding: UTF-8

# system modules
from __future__ import print_function
from __future__ import absolute_import
try:
    import queue
except ImportError:
    import Queue as queue
from threading import Thread
from time import sleep
from collections import defaultdict

# third-party modules
# TODO: add timer
# from qtpy.QtCore import QTimer

# our own modules
from .eventtype import EVENT_TYPE


########################################################################
class EventEngine(object):
    """
    Event-driven engine.

    All variables of the engine are private, to prevent them from being
    modified accidentally from outside code, which could lead to bugs.

    Variables:
    __queue: private, the event queue
    __active: private, the on/off switch of the engine
    __thread: private, the event-processing thread
    __timer: private, the timer
    __handlers: private, the dict of event handler functions

    Methods:
    __run: private, the main loop of the event-processing thread
    __process: private, processes one event by calling the handlers registered with the engine
    __onTimer: private, puts a timer event into the queue each time the timer fires
    start: public, starts the engine
    stop: public, stops the engine
    register: public, registers a handler with the engine
    unregister: public, removes a handler from the engine
    put: public, puts a new event into the event queue

    An event handler must be defined to take a single event object as its
    only argument, i.e.:

    function
    def func(event)
        ...

    method
    def method(self, event)
        ...
    """

    #----------------------------------------------------------------------
    def __init__(self):
        """Initialize the event engine."""
        super(EventEngine, self).__init__()
        # event queue
        self.__queue = queue.Queue()

        # engine switch
        self.__active = False

        # event-processing thread
        self.__thread = Thread(target=self.__run)

        # timer used to trigger timer events
        self.__timer = None  # QTimer()
        # self.__timer.timeout.connect(self.__onTimer)

        # __handlers is a dict that maps each event type to the handlers
        # registered for it; each value is a list of handler functions
        # listening for that event type
        self.__handlers = defaultdict(list)

        # __generalHandlers is a list of general handlers called for every event
        self.__generalHandlers = []

    #----------------------------------------------------------------------
    def __run(self):
        """Run the engine."""
        while self.__active == True:
            try:
                # block for at most 1 second while waiting for an event
                event = self.__queue.get(block=True, timeout=1)
                self.__process(event)
            except queue.Empty:
                pass

    #----------------------------------------------------------------------
    def __process(self, event):
        """Process an event."""
        # check whether any handlers listen for this event type
        if event.type_ in self.__handlers:
            # if so, pass the event to each handler, in order
            [handler(event) for handler in self.__handlers[event.type_]]

            # the statement above uses a list comprehension; the equivalent
            # plain loop is:
            #for handler in self.__handlers[event.type_]:
                #handler(event)

        # call the general handlers
        if self.__generalHandlers:
            [handler(event) for handler in self.__generalHandlers]

    #----------------------------------------------------------------------
    def __onTimer(self):
        """Put a timer event into the event queue."""
        # create the timer event
        event = Event(type_=EVENT_TYPE.TIMER)

        # put it into the queue
        self.put(event)

    #----------------------------------------------------------------------
    def start(self, timer=True):
        """
        Start the engine.
        timer: whether to start the timer as well
        """
        # mark the engine as active
        self.__active = True

        # start the event-processing thread
        self.__thread.start()

        # start the timer with a default interval of 1 second
        # (guarded: the QTimer above is currently disabled, so __timer may be None)
        if timer and self.__timer is not None:
            self.__timer.start(1000)

    #----------------------------------------------------------------------
    def stop(self):
        """Stop the engine."""
        # mark the engine as stopped
        self.__active = False

        # stop the timer
        if self.__timer:
            self.__timer.stop()

        # wait for the event-processing thread to exit
        self.__thread.join()

    #----------------------------------------------------------------------
    def register(self, type_, handler):
        """Register a handler to listen for an event type."""
        # get the handler list for this event type; being a defaultdict,
        # __handlers automatically creates a new list if none exists yet
        handlerList = self.__handlers[type_]

        # add the handler to the list, if it is not registered already
        if handler not in handlerList:
            handlerList.append(handler)

    #----------------------------------------------------------------------
    def unregister(self, type_, handler):
        """Unregister a handler for an event type."""
        # get the handler list for this event type; ignore the request if empty
        handlerList = self.__handlers[type_]

        # if the handler is in the list, remove it
        if handler in handlerList:
            handlerList.remove(handler)

        # if the list is now empty, remove the event type from the engine
        if not handlerList:
            del self.__handlers[type_]

    #----------------------------------------------------------------------
    def put(self, event):
"""向事件队列中存入事件""" self.__queue.put(event) #---------------------------------------------------------------------- def registerGeneralHandler(self, handler): """注册通用事件处理函数监听""" if handler not in self.__generalHandlers: self.__generalHandlers.append(handler) #---------------------------------------------------------------------- def unregisterGeneralHandler(self, handler): """注销通用事件处理函数监听""" if handler in self.__generalHandlers: self.__generalHandlers.remove(handler) ######################################################################## class EventEngine2(object): """ 计时器使用python线程的事件驱动引擎 """ #---------------------------------------------------------------------- def __init__(self): """初始化事件引擎""" # 事件队列 self.__queue = queue.Queue() # 事件引擎开关 self.__active = False # 事件处理线程 self.__thread = Thread(target = self.__run) # 计时器,用于触发计时器事件 self.__timer = Thread(target = self.__runTimer) self.__timerActive = False # 计时器工作状态 self.__timerSleep = 1 # 计时器触发间隔(默认1秒) # 这里的__handlers是一个字典,用来保存对应的事件调用关系 # 其中每个键对应的值是一个列表,列表中保存了对该事件进行监听的函数功能 self.__handlers = defaultdict(list) # __generalHandlers是一个列表,用来保存通用回调函数(所有事件均调用) self.__generalHandlers = [] #---------------------------------------------------------------------- def __run(self): """引擎运行""" while self.__active == True: try: event = self.__queue.get(block = True, timeout = 1) # 获取事件的阻塞时间设为1秒 self.__process(event) except queue.Empty: pass #---------------------------------------------------------------------- def __process(self, event): """处理事件""" # 检查是否存在对该事件进行监听的处理函数 if event.type_ in self.__handlers: # 若存在,则按顺序将事件传递给处理函数执行 [handler(event) for handler in self.__handlers[event.type_]] # 以上语句为Python列表解析方式的写法,对应的常规循环写法为: #for handler in self.__handlers[event.type_]: #handler(event) # 调用通用处理函数进行处理 if self.__generalHandlers: [handler(event) for handler in self.__generalHandlers] #---------------------------------------------------------------------- def __runTimer(self): """运行在计时器线程中的循环函数""" while self.__timerActive: # 创建计时器事件 event = Event(type_=EVENT_TYPE.TIMER) # 向队列中存入计时器事件 self.put(event) # 等待 sleep(self.__timerSleep) #---------------------------------------------------------------------- def start(self, timer=True): """ 引擎启动 timer:是否要启动计时器 """ # 将引擎设为启动 self.__active = True # 启动事件处理线程 self.__thread.start() # 启动计时器,计时器事件间隔默认设定为1秒 if timer: self.__timerActive = True self.__timer.start() #---------------------------------------------------------------------- def stop(self): """停止引擎""" # 将引擎设为停止 self.__active = False # 停止计时器 self.__timerActive = False self.__timer.join() # 等待事件处理线程退出 self.__thread.join() #---------------------------------------------------------------------- def register(self, type_, handler): """注册事件处理函数监听""" # 尝试获取该事件类型对应的处理函数列表,若无defaultDict会自动创建新的list handlerList = self.__handlers[type_] # 若要注册的处理器不在该事件的处理器列表中,则注册该事件 if handler not in handlerList: handlerList.append(handler) #---------------------------------------------------------------------- def unregister(self, type_, handler): """注销事件处理函数监听""" # 尝试获取该事件类型对应的处理函数列表,若无则忽略该次注销请求 handlerList = self.__handlers[type_] # 如果该函数存在于列表中,则移除 if handler in handlerList: handlerList.remove(handler) # 如果函数列表为空,则从引擎中移除该事件类型 if not handlerList: del self.__handlers[type_] #---------------------------------------------------------------------- def put(self, event): """向事件队列中存入事件""" self.__queue.put(event) #---------------------------------------------------------------------- def registerGeneralHandler(self, handler): """注册通用事件处理函数监听""" if handler not in self.__generalHandlers: 
            self.__generalHandlers.append(handler)

    #----------------------------------------------------------------------
    def unregisterGeneralHandler(self, handler):
        """Unregister a general handler."""
        if handler in self.__generalHandlers:
            self.__generalHandlers.remove(handler)


########################################################################
class Event:
    """
    Event is a class used to represent an event that happened.

    Attributes
    ----------
    type_ : str
    dic : dict
    """

    def __init__(self, type_=None):
        """Constructor"""
        self.type_ = type_  # event type
        self.dic = {}  # dict holding the concrete event data

    def __repr__(self):
        # fixed: the original built this string but never returned it
        return "Event [{0:s}] with data {1:}".format(
            self.type_, list(self.dic.keys())[:10])

    def __str__(self):
        return self.__repr__()


#----------------------------------------------------------------------
def test():
    """Test function."""
    # import sys
    from datetime import datetime
    import time

    # from PyQt4.QtCore import QCoreApplication

    def simpletest(event):
        print(u'processing the timer event triggered every second: %s'
              % str(datetime.now()))

    # app = QCoreApplication(sys.argv)

    def my_general_handler(event):
        print("General event type {}".format(event.type_))

    ee = EventEngine2()
    # ee.register(EVENT_TIMER, simpletest)
    ee.registerGeneralHandler(simpletest)
    ee.start()
    ee.registerGeneralHandler(my_general_handler)
    ee.put(Event('blabla'))
    time.sleep(3)
    ee.stop()
    # app.exec_()


# run this script directly to test
if __name__ == '__main__':
    test()
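#----------------------------------------------------------------------
# Editor's usage sketch (mirrors test() above): register a handler for one
# specific event type instead of a general handler. 'CUSTOM' is a placeholder
# event type string.
#
#   import time
#
#   def on_custom(event):
#       print('got', event.type_, event.dic)
#
#   ee = EventEngine2()
#   ee.register('CUSTOM', on_custom)   # only 'CUSTOM' events reach it
#   ee.start()                         # timer events are ignored by on_custom
#   evt = Event('CUSTOM')
#   evt.dic['payload'] = 42
#   ee.put(evt)
#   time.sleep(2)
#   ee.stop()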
tunnel.py
"""Basic ssh tunnel utilities, and convenience functions for tunneling zeromq connections. """ # Copyright (C) 2010-2011 IPython Development Team # Copyright (C) 2011- PyZMQ Developers # # Redistributed from IPython under the terms of the BSD License. from __future__ import print_function import atexit import os import re import signal import socket import sys import warnings from getpass import getpass, getuser from typing import Type from multiprocessing import Process try: with warnings.catch_warnings(): warnings.simplefilter('ignore', DeprecationWarning) import paramiko SSHException = paramiko.ssh_exception.SSHException except ImportError: paramiko = None # type: ignore class SSHException(Exception): # type: ignore pass else: from .forward import forward_tunnel try: import pexpect except ImportError: pexpect = None from ..utils.strtypes import b def select_random_ports(n): """Select and return n random ports that are available.""" ports = [] sockets = [] for i in range(n): sock = socket.socket() sock.bind(('', 0)) ports.append(sock.getsockname()[1]) sockets.append(sock) for sock in sockets: sock.close() return ports # ----------------------------------------------------------------------------- # Check for passwordless login # ----------------------------------------------------------------------------- _password_pat = re.compile(b(r'pass(word|phrase):'), re.IGNORECASE) def try_passwordless_ssh(server, keyfile, paramiko=None): """Attempt to make an ssh connection without a password. This is mainly used for requiring password input only once when many tunnels may be connected to the same server. If paramiko is None, the default for the platform is chosen. """ if paramiko is None: paramiko = sys.platform == 'win32' if not paramiko: f = _try_passwordless_openssh else: f = _try_passwordless_paramiko return f(server, keyfile) def _try_passwordless_openssh(server, keyfile): """Try passwordless login with shell ssh command.""" if pexpect is None: raise ImportError("pexpect unavailable, use paramiko") cmd = 'ssh -f ' + server if keyfile: cmd += ' -i ' + keyfile cmd += ' exit' # pop SSH_ASKPASS from env env = os.environ.copy() env.pop('SSH_ASKPASS', None) ssh_newkey = 'Are you sure you want to continue connecting' p = pexpect.spawn(cmd, env=env) while True: try: i = p.expect([ssh_newkey, _password_pat], timeout=0.1) if i == 0: raise SSHException( 'The authenticity of the host can\'t be established.' ) except pexpect.TIMEOUT: continue except pexpect.EOF: return True else: return False def _try_passwordless_paramiko(server, keyfile): """Try passwordless login with paramiko.""" if paramiko is None: msg = "Paramiko unavailable, " if sys.platform == 'win32': msg += "Paramiko is required for ssh tunneled connections on Windows." else: msg += "use OpenSSH." raise ImportError(msg) username, server, port = _split_server(server) client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.WarningPolicy()) try: client.connect( server, port, username=username, key_filename=keyfile, look_for_keys=True ) except paramiko.AuthenticationException: return False else: client.close() return True def tunnel_connection( socket, addr, server, keyfile=None, password=None, paramiko=None, timeout=60 ): """Connect a socket to an address via an ssh tunnel. This is a wrapper for socket.connect(addr), when addr is not accessible from the local machine. 
It simply creates an ssh tunnel using the remaining args, and calls socket.connect('tcp://localhost:lport') where lport is the randomly selected local port of the tunnel. """ new_url, tunnel = open_tunnel( addr, server, keyfile=keyfile, password=password, paramiko=paramiko, timeout=timeout, ) socket.connect(new_url) return tunnel def open_tunnel(addr, server, keyfile=None, password=None, paramiko=None, timeout=60): """Open a tunneled connection from a 0MQ url. For use inside tunnel_connection. Returns ------- (url, tunnel) : (str, object) The 0MQ url that has been forwarded, and the tunnel object """ lport = select_random_ports(1)[0] transport, addr = addr.split('://') ip, rport = addr.split(':') rport = int(rport) if paramiko is None: paramiko = sys.platform == 'win32' if paramiko: tunnelf = paramiko_tunnel else: tunnelf = openssh_tunnel tunnel = tunnelf( lport, rport, server, remoteip=ip, keyfile=keyfile, password=password, timeout=timeout, ) return 'tcp://127.0.0.1:%i' % lport, tunnel def openssh_tunnel( lport, rport, server, remoteip='127.0.0.1', keyfile=None, password=None, timeout=60 ): """Create an ssh tunnel using command-line ssh that connects port lport on this machine to localhost:rport on server. The tunnel will automatically close when not in use, remaining open for a minimum of timeout seconds for an initial connection. This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`, as seen from `server`. keyfile and password may be specified, but ssh config is checked for defaults. Parameters ---------- lport : int local port for connecting to the tunnel from this machine. rport : int port on the remote machine to connect to. server : str The ssh server to connect to. The full ssh server string will be parsed. user@server:port remoteip : str [Default: 127.0.0.1] The remote ip, specifying the destination of the tunnel. Default is localhost, which means that the tunnel would redirect localhost:lport on this machine to localhost:rport on the *server*. keyfile : str; path to public key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str; Your ssh password to the ssh server. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. timeout : int [default: 60] The time (in seconds) after which no activity will result in the tunnel closing. This prevents orphaned tunnels from running forever. 
""" if pexpect is None: raise ImportError("pexpect unavailable, use paramiko_tunnel") ssh = "ssh " if keyfile: ssh += "-i " + keyfile if ':' in server: server, port = server.split(':') ssh += " -p %s" % port cmd = "%s -O check %s" % (ssh, server) (output, exitstatus) = pexpect.run(cmd, withexitstatus=True) if not exitstatus: pid = int(output[output.find(b"(pid=") + 5 : output.find(b")")]) cmd = "%s -O forward -L 127.0.0.1:%i:%s:%i %s" % ( ssh, lport, remoteip, rport, server, ) (output, exitstatus) = pexpect.run(cmd, withexitstatus=True) if not exitstatus: atexit.register(_stop_tunnel, cmd.replace("-O forward", "-O cancel", 1)) return pid cmd = "%s -f -S none -L 127.0.0.1:%i:%s:%i %s sleep %i" % ( ssh, lport, remoteip, rport, server, timeout, ) # pop SSH_ASKPASS from env env = os.environ.copy() env.pop('SSH_ASKPASS', None) ssh_newkey = 'Are you sure you want to continue connecting' tunnel = pexpect.spawn(cmd, env=env) failed = False while True: try: i = tunnel.expect([ssh_newkey, _password_pat], timeout=0.1) if i == 0: raise SSHException( 'The authenticity of the host can\'t be established.' ) except pexpect.TIMEOUT: continue except pexpect.EOF: if tunnel.exitstatus: print(tunnel.exitstatus) print(tunnel.before) print(tunnel.after) raise RuntimeError("tunnel '%s' failed to start" % (cmd)) else: return tunnel.pid else: if failed: print("Password rejected, try again") password = None if password is None: password = getpass("%s's password: " % (server)) tunnel.sendline(password) failed = True def _stop_tunnel(cmd): pexpect.run(cmd) def _split_server(server): if '@' in server: username, server = server.split('@', 1) else: username = getuser() if ':' in server: server, port = server.split(':') port = int(port) else: port = 22 return username, server, port def paramiko_tunnel( lport, rport, server, remoteip='127.0.0.1', keyfile=None, password=None, timeout=60 ): """launch a tunner with paramiko in a subprocess. This should only be used when shell ssh is unavailable (e.g. Windows). This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`, as seen from `server`. If you are familiar with ssh tunnels, this creates the tunnel: ssh server -L localhost:lport:remoteip:rport keyfile and password may be specified, but ssh config is checked for defaults. Parameters ---------- lport : int local port for connecting to the tunnel from this machine. rport : int port on the remote machine to connect to. server : str The ssh server to connect to. The full ssh server string will be parsed. user@server:port remoteip : str [Default: 127.0.0.1] The remote ip, specifying the destination of the tunnel. Default is localhost, which means that the tunnel would redirect localhost:lport on this machine to localhost:rport on the *server*. keyfile : str; path to public key file This specifies a key to be used in ssh login, default None. Regular default ssh keys will be used without specifying this argument. password : str; Your ssh password to the ssh server. Note that if this is left None, you will be prompted for it if passwordless key based login is unavailable. timeout : int [default: 60] The time (in seconds) after which no activity will result in the tunnel closing. This prevents orphaned tunnels from running forever. 
""" if paramiko is None: raise ImportError("Paramiko not available") if password is None: if not _try_passwordless_paramiko(server, keyfile): password = getpass("%s's password: " % (server)) p = Process( target=_paramiko_tunnel, args=(lport, rport, server, remoteip), kwargs=dict(keyfile=keyfile, password=password), ) p.daemon = True p.start() return p def _paramiko_tunnel(lport, rport, server, remoteip, keyfile=None, password=None): """Function for actually starting a paramiko tunnel, to be passed to multiprocessing.Process(target=this), and not called directly. """ username, server, port = _split_server(server) client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.WarningPolicy()) try: client.connect( server, port, username=username, key_filename=keyfile, look_for_keys=True, password=password, ) # except paramiko.AuthenticationException: # if password is None: # password = getpass("%s@%s's password: "%(username, server)) # client.connect(server, port, username=username, password=password) # else: # raise except Exception as e: print('*** Failed to connect to %s:%d: %r' % (server, port, e)) sys.exit(1) # Don't let SIGINT kill the tunnel subprocess signal.signal(signal.SIGINT, signal.SIG_IGN) try: forward_tunnel(lport, remoteip, rport, client.get_transport()) except KeyboardInterrupt: print('SIGINT: Port forwarding stopped cleanly') sys.exit(0) except Exception as e: print("Port forwarding stopped uncleanly: %s" % e) sys.exit(255) if sys.platform == 'win32': ssh_tunnel = paramiko_tunnel else: ssh_tunnel = openssh_tunnel __all__ = [ 'tunnel_connection', 'ssh_tunnel', 'openssh_tunnel', 'paramiko_tunnel', 'try_passwordless_ssh', ]
ComputeNodeTest.py
########################################################################## # # Copyright (c) 2011-2012, John Haddon. All rights reserved. # Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of John Haddon nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import unittest import threading import time import IECore import Gaffer import GafferTest class ComputeNodeTest( GafferTest.TestCase ) : def testOperation( self ) : n1 = GafferTest.AddNode() n1["sum"].getValue() dirtiedPlugs = GafferTest.CapturingSlot( n1.plugDirtiedSignal() ) setPlugs = GafferTest.CapturingSlot( n1.plugSetSignal() ) n1["op1"].setValue( 2 ) self.assertEqual( len( setPlugs ), 1 ) self.assertEqual( len( dirtiedPlugs ), 2 ) self.assertEqual( setPlugs[0][0].fullName(), "AddNode.op1" ) self.assertEqual( dirtiedPlugs[0][0].fullName(), "AddNode.op1" ) self.assertEqual( dirtiedPlugs[1][0].fullName(), "AddNode.sum" ) n1["op2"].setValue( 3 ) self.assertEqual( len( setPlugs ), 2 ) self.assertEqual( setPlugs[1][0].fullName(), "AddNode.op2" ) # the dirty callback shouldn't have been triggered this time, # as the plug was already dirty. ## \todo Reintroduce me #self.assertEqual( len( dirtiedPlugs ), 1 ) del dirtiedPlugs[:] del setPlugs[:] # plug set or dirty signals are not emitted during computation self.assertEqual( n1.getChild("sum").getValue(), 5 ) self.assertEqual( len( setPlugs ), 0 ) self.assertEqual( len( dirtiedPlugs ), 0 ) # connect another add node onto the output of this one n2 = GafferTest.AddNode( "Add2" ) dirtiedPlugs2 = GafferTest.CapturingSlot( n2.plugDirtiedSignal() ) setPlugs2 = GafferTest.CapturingSlot( n2.plugSetSignal() ) n2["op1"].setInput( n1["sum"] ) # connecting a plug doesn't set the value of the input plug # immediately - the value is transferred only upon request. 
self.assertEqual( len( setPlugs2 ), 0 ) self.assertEqual( len( dirtiedPlugs2 ), 2 ) self.assertEqual( dirtiedPlugs2[0][0].fullName(), "Add2.op1" ) self.assertEqual( dirtiedPlugs2[1][0].fullName(), "Add2.sum" ) del dirtiedPlugs2[:] del setPlugs2[:] self.assertEqual( n2["op1"].getValue(), 5 ) self.assertEqual( n2["sum"].getValue(), 5 ) # plug set or dirty signals are not emitted during computation self.assertEqual( len( setPlugs2 ), 0 ) self.assertEqual( len( dirtiedPlugs2 ), 0 ) def testDirtyOfInputsWithConnections( self ) : n1 = GafferTest.AddNode( "n1" ) n2 = GafferTest.AddNode( "n2" ) dirtied = GafferTest.CapturingSlot( n1.plugDirtiedSignal(), n2.plugDirtiedSignal() ) n2["op1"].setInput( n1["sum"] ) self.assertEqual( len( dirtied ), 2 ) self.failUnless( dirtied[0][0].isSame( n2["op1"] ) ) self.failUnless( dirtied[1][0].isSame( n2["sum"] ) ) del dirtied[:] n1["op1"].setValue( 10 ) self.assertEqual( len( dirtied ), 4 ) self.failUnless( dirtied[0][0].isSame( n1["op1"] ) ) self.failUnless( dirtied[1][0].isSame( n1["sum"] ) ) self.failUnless( dirtied[2][0].isSame( n2["op1"] ) ) self.failUnless( dirtied[3][0].isSame( n2["sum"] ) ) self.assertEqual( n2.getChild( "sum" ).getValue(), 10 ) def testDirtyPlugComputesSameValueAsBefore( self ) : n1 = GafferTest.AddNode( "N1" ) n2 = GafferTest.AddNode( "N2" ) n2.getChild( "op1" ).setInput( n1.getChild( "sum" ) ) n1.getChild( "op1" ).setValue( 1 ) n1.getChild( "op2" ).setValue( -1 ) self.assertEqual( n2.getChild( "sum" ).getValue(), 0 ) def testOutputsDirtyForNewNodes( self ) : n = GafferTest.AddNode() n["op1"].setValue( 1 ) n["op2"].setValue( 2 ) self.assertEqual( n["sum"].getValue(), 3 ) def testComputeInContext( self ) : n = GafferTest.FrameNode() self.assertEqual( n["output"].getValue(), 1 ) c = Gaffer.Context() c.setFrame( 10 ) with c : self.assertEqual( n["output"].getValue(), 10 ) def testComputeInThreads( self ) : n = GafferTest.FrameNode() def f( frame ) : c = Gaffer.Context() c.setFrame( frame ) with c : time.sleep( 0.01 ) self.assertEqual( n["output"].getValue(), frame ) threads = [] for i in range( 0, 1000 ) : t = threading.Thread( target = f, args = ( i, ) ) t.start() threads.append( t ) for t in threads : t.join() def testDirtyNotPropagatedDuringCompute( self ) : n1 = GafferTest.AddNode( "n1" ) n2 = GafferTest.AddNode( "n2" ) n1["op1"].setValue( 2 ) n1["op2"].setValue( 3 ) n2["op1"].setInput( n1["sum"] ) dirtyCapturer = GafferTest.CapturingSlot( n2.plugDirtiedSignal() ) self.assertEqual( n2["sum"].getValue(), 5 ) self.assertEqual( len( dirtyCapturer ), 0 ) def testWrongPlugSet( self ) : n = GafferTest.BadNode() self.assertRaises( RuntimeError, n["out1"].getValue ) def testPlugNotSet( self ) : n = GafferTest.BadNode() self.assertRaises( RuntimeError, n["out3"].getValue ) def testHash( self ) : n = GafferTest.MultiplyNode() self.assertHashesValid( n ) def testHashForPythonDerivedClasses( self ) : n = GafferTest.AddNode() self.assertHashesValid( n ) def testDisableCaching( self ) : n = GafferTest.CachingTestNode() n["in"].setValue( "d" ) v1 = n["out"].getValue( _copy=False ) v2 = n["out"].getValue( _copy=False ) self.assertEqual( v1, v2 ) self.assertEqual( v1, IECore.StringData( "d" ) ) # the objects should be one and the same, as the second computation # should have shortcut and returned a cached result. 
self.failUnless( v1.isSame( v2 ) ) n["out"].setFlags( Gaffer.Plug.Flags.Cacheable, False ) v3 = n["out"].getValue( _copy=False ) self.assertEqual( v3, IECore.StringData( "d" ) ) self.assertEqual( v3, v1 ) # we disabled caching, so the two values should # be distinct objects, even though they are equal. self.failIf( v3.isSame( v1 ) ) def testConnectedPlugsShareHashesAndCacheEntries( self ) : class Out( Gaffer.ComputeNode ) : def __init__( self, name="Out" ) : Gaffer.ComputeNode.__init__( self, name ) self.addChild( Gaffer.ObjectPlug( "oOut", Gaffer.Plug.Direction.Out, IECore.NullObject() ) ) self.addChild( Gaffer.FloatPlug( "fOut", Gaffer.Plug.Direction.Out ) ) def affects( self, input ) : return [] def hash( self, output, context, h ) : h.append( context.getFrame() ) def compute( self, plug, context ) : if plug.getName() == "oOut" : plug.setValue( IECore.IntData( int( context.getFrame() ) ) ) else : plug.setValue( context.getFrame() ) IECore.registerRunTimeTyped( Out ) class In( Gaffer.ComputeNode ) : def __init__( self, name="In" ) : Gaffer.ComputeNode.__init__( self, name ) self.addChild( Gaffer.ObjectPlug( "oIn", Gaffer.Plug.Direction.In, IECore.NullObject() ) ) self.addChild( Gaffer.IntPlug( "iIn", Gaffer.Plug.Direction.In ) ) IECore.registerRunTimeTyped( In ) nOut = Out() nIn = In() nIn["oIn"].setInput( nOut["oOut"] ) nIn["iIn"].setInput( nOut["fOut"] ) for i in range( 0, 1000 ) : c = Gaffer.Context() c.setFrame( i ) with c : # because oIn and oOut are connected, they should # have the same hash and share the exact same value. self.assertEqual( nIn["oIn"].getValue(), IECore.IntData( i ) ) self.assertEqual( nOut["oOut"].getValue(), IECore.IntData( i ) ) self.assertEqual( nIn["oIn"].hash(), nOut["oOut"].hash() ) self.failUnless( nIn["oIn"].getValue( _copy=False ).isSame( nOut["oOut"].getValue( _copy=False ) ) ) # even though iIn and fOut are connected, they should have # different hashes and different values, because type conversion # (float to int) is performed when connecting them. self.assertEqual( nIn["iIn"].getValue(), i ) self.assertEqual( nOut["fOut"].getValue(), float( i ) ) self.assertNotEqual( nIn["iIn"].hash(), nOut["fOut"].hash() ) class PassThrough( Gaffer.ComputeNode ) : def __init__( self, name="PassThrough", inputs={}, dynamicPlugs=() ) : Gaffer.ComputeNode.__init__( self, name ) self.addChild( Gaffer.ObjectPlug( "in", Gaffer.Plug.Direction.In, IECore.NullObject() ) ) self.addChild( Gaffer.ObjectPlug( "out", Gaffer.Plug.Direction.Out, IECore.NullObject() ) ) def affects( self, input ) : assert( input.isSame( self["in"] ) ) return [ self["out"] ] def hash( self, output, context, h ) : assert( output.isSame( self["out"] ) ) # by assigning directly to the hash rather than appending, # we signify that we'll pass through the value unchanged. h.copyFrom( self["in"].hash() ) def compute( self, plug, context ) : assert( plug.isSame( self["out"] ) ) plug.setValue( self["in"].getValue( _copy=False ), _copy=False ) IECore.registerRunTimeTyped( PassThrough ) def testPassThroughSharesHashes( self ) : n = self.PassThrough() n["in"].setValue( IECore.MeshPrimitive.createPlane( IECore.Box2f( IECore.V2f( -1 ), IECore.V2f( 1 ) ) ) ) self.assertEqual( n["in"].hash(), n["out"].hash() ) self.assertEqual( n["in"].getValue(), n["out"].getValue() ) def testPassThroughSharesCacheEntries( self ) : n = self.PassThrough() n["in"].setValue( IECore.MeshPrimitive.createPlane( IECore.Box2f( IECore.V2f( -1 ), IECore.V2f( 1 ) ) ) ) # this fails because TypedObjectPlug::setValue() currently does a copy. 
I think we can
		# optimise things by allowing a copy-free setValue() function for use during computations.
		self.failUnless( n["in"].getValue( _copy=False ).isSame( n["out"].getValue( _copy=False ) ) )

	def testInternalConnections( self ) :

		a = GafferTest.AddNode()
		a["op1"].setValue( 10 )

		n = Gaffer.Node()
		n["in"] = Gaffer.IntPlug()
		n["out"] = Gaffer.IntPlug( direction = Gaffer.Plug.Direction.Out )

		n["out"].setInput( n["in"] )
		n["in"].setInput( a["sum"] )

		self.assertEqual( n["out"].getValue(), a["sum"].getValue() )
		self.assertEqual( n["out"].hash(), a["sum"].hash() )

if __name__ == "__main__":
	unittest.main()
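# A minimal sketch (not part of the original test suite) of the ComputeNode
# pattern exercised above, written against the same Gaffer/IECore API used in
# this file: affects() declares dependencies, hash() keys the cache, and
# compute() produces the value. The node itself is hypothetical.
class DoubleNode( Gaffer.ComputeNode ) :

	def __init__( self, name="DoubleNode" ) :

		Gaffer.ComputeNode.__init__( self, name )
		self.addChild( Gaffer.IntPlug( "in", Gaffer.Plug.Direction.In ) )
		self.addChild( Gaffer.IntPlug( "out", Gaffer.Plug.Direction.Out ) )

	def affects( self, input ) :

		# dirtying "in" must dirty "out", giving the propagation
		# behaviour asserted by the tests above.
		if input.isSame( self["in"] ) :
			return [ self["out"] ]
		return []

	def hash( self, output, context, h ) :

		# hash the input value, so equal inputs share cache entries
		h.append( self["in"].getValue() )

	def compute( self, plug, context ) :

		plug.setValue( self["in"].getValue() * 2 )

IECore.registerRunTimeTyped( DoubleNode )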
__init__.py
""" exos.py EXpressions Over Statements: extended functional tools in Python. """ import threading from functools import partial, reduce from inspect import getfullargspec from operator import iconcat from .decorators import curry, fattr, memoize from .exceptions import NonExhaustivePattern from .io import each, peach, print_each, ueach from .utils import pairs __author__ = "Bruno Lange" __email__ = "blangeram@gmail.com" __license__ = "MIT" def when(*args): """ A declarative approach to a switch statement. >>> a = 42 >>> c = when( a < 4, 'less than 4', a < 10, 'less than 10', a == 42, 'the answer!', ) >>> print(c) The answer! If you'd like to defer evaluation of either the predicate or the actual value, use a lambda or a partial constructor to emulate laziness. """ _pairs = pairs(*args) last = _pairs[-1] if len(last) < 2: last.insert(0, True) for predicate, value in _pairs: predicate = predicate() if callable(predicate) else predicate if predicate: return value() if callable(value) else value raise NonExhaustivePattern() def flip(fn): """ Takes a function that takes two or more positional parameters and returns another one where the first two positional parameters are flipped. >>> def statement(a, b): ... print("I'm {} if and only if I'm {}.".format(a, b)) ... >>> statement('alive', 'breathing') I'm alive if and only if I'm breathing. >>> flip(statement)('alive', 'breathing') I'm breathing if and only if I'm alive. """ spec = getfullargspec(fn) arity = len(spec.args) - len(spec.defaults or ()) if arity < 2: return fn def flipped(*args, **kwargs): swapped = (args[1], args[0]) + args[2:] return ( fn(*swapped, **kwargs) if len(args) == arity else partial(fn, *swapped, **kwargs) ) return flipped def mattr(attr): """ Returns a mapper function for attribute extraction mattr('user') <=> lambda account: account.user mattr('user.email') <=> lambda account: account.user.email """ return partial(reduce, getattr, attr.split(".")) class XAttrNoDefault: pass def xattr(obj, attr, default=XAttrNoDefault): """ Similar to getattr except it allows for deep extraction of attributes by splitting them with a dot. Unless a default value is provided, an AttributeError exception is thrown when the attribute does not exist. >>> xattr(matrix, 'rank') # same as getattr(matrix, 'rank') or matrix.rank 4 >>> xattr(wave, 'amplitude.imag') 1.618 """ return reduce( getattr if default is XAttrNoDefault else lambda acc, curr: getattr(acc, curr, default), attr.split('.'), obj ) def map_attr(attr, iterable): """ Returns a map object where each item corresponds to the extracted attribute given by `attr` from the original object in the iterable collection. """ return map(mattr(attr), iterable) def mmethod(path, *args, **kwargs): """ Returns a mapper function that runs the path method for each instance of the iterable collection. >>> mmethod('start') is equivalent to >>> lambda thread: thread.start() >>> mmethod('book_set.filter', number_of_pages__gte=100) is equivalent to >>> lambda author: author.book_set.filter(number_of_pages__gte=100) """ return lambda x: mattr(path)(x)(*args, **kwargs) def map_method(path, iterable): """ Returns a map object in which each item corresponds to the method given by `path` called on the objects in the iterable collection. >>> map('magnitude', [v1, v2, v3]) is equivalent to >>> map(lambda v: v.magnitude(), [v1, v2, v3]) """ return map(mmethod(path), iterable) def flatten(xs): """ Flattens list of lists. 
>>> flatten([[1], [2], [3]) >>> [1,2,3] >>> flatten([[10], [], [55]) >>> [10, 55] """ return reduce(iconcat, xs, []) def zip_with_map(mapper, iterable): """ Returns a collection of pairs where the first element correspond to the item in the iterable and the second is its mapped version, that is, the item when applied to the mapper function. >>> zip_with_map(lambda x: x**2, [1,2,3]) >>> [(1,1), (2,4), (3,9)] """ return zip(iterable, map(mapper, iterable)) def zip_with_attr(iterable, *attrs): """ Zips collection of objects with instance attribute zip(cars, (car.price for car in cars)) <=> zip_with_attr(cars, 'price') """ return zip( iterable, *(tuple(xattr(item, attr) for item in iterable) for attr in attrs) ) def unpack(iterable, *attrs): """Unpacks the iterable in parity with the attributes passed. ``` >>> @dataclass ... class Point: ... u: float ... v: float ... >>> points: List[Point] = get_points() >>> for u, v in unpack(points, "u", "v"): ... print(u, v) ``` """ for x in iterable: yield tuple(getattr(x, a) for a in attrs) def extend(*dicts): """ Returns a dictionary that combines all dictionaries passed as arguments without mutating any of them. """ def fold(acc, curr): acc.update(curr) return acc return reduce(fold, dicts, {}) def reduce_right(fold, xs, x0): """ Right-associative fold of a structure. """ return reduce(flip(fold), reversed(xs), x0) class Identity: """Utility class for the composition function. Acts as a placeholder for the initial value of the reduction functions """ pass _compose = lambda f, g: ( f if g is Identity else lambda *args, **kwargs: f(g(*args, **kwargs)) ) def compose(*fns): """ Simple function composition. """ return reduce_right(_compose, fns, Identity) def pipe(*fns): """ Left-to-right composition, Unix style. """ return reduce(flip(_compose), fns, Identity) def setattr_(obj, name, value): """Similar to setattr except it returns back the modified object. """ setattr(obj, name, value) return obj def setattrs(obj, *args, **kwargs): """Allows for multiple attributes to be set from dictionaries passed as positional arguments or named parameters. >>> setattrs(car, {'make': 'Jeep', 'model': 'Patriot'}, year=2011) <__main__.Car object at 0x103331320> >>> car.make, car.model, car.year ('Jeep', 'Patriot', 2011) """ attrs = extend(*args, kwargs) return reduce( lambda acc, curr: setattr_(acc, *curr), # curr <- (k, v) attrs.items(), obj ) def take(n, collection): """Returns at most n items from the collection in a list >>> take(4, range(100000, 1000000, 4)) [100000, 100004, 100008, 100012] >>> take(10, ['hello', 'world']) ['hello', 'world'] """ return [item for item, _ in zip(collection, range(n))] def take_while(predicate, collection): """Returns a list corresponding to the longest prefix of the original list for which all the values when tested against the given predicate return True >>> take_while(lambda x: x<=10, range(10000)) [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] """ payload = [] for item in collection: if not predicate(item): break payload.append(item) return payload def tmap(fn, collection): """Concurrent map. Map each item in the collection with the provided function in a separate thread. `tmap` can drastically improve performance when compared to `map` but should only be used for IO-bound functions that have no side effects. 
>>> tmap(download, urls) [<url1_data>, <url2_data>, <url3_data>] >>> datasets = tmap(read_csv, files) """ n = len(collection) payload = [None] * n def process(i): payload[i] = fn(collection[i]) threads = [threading.Thread(target=process, args=(i,)) for i in range(n)] each("start", threads) each("join", threads) return payload def teach(fn, collection): """Concurrently digest each item in the collection with the provided function. >>> tmap(save_to_disk, documents) """ threads = [threading.Thread(target=fn, args=(item,)) for item in collection] each("start", threads) each("join", threads)
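# A small usage sketch (not part of the module): exercising a few of the
# combinators defined above. Guarded so that importing the module stays
# side-effect free; run it with `python -m`, since the relative imports at
# the top require package context.
if __name__ == "__main__":
    # when() scans predicate/value pairs in order; a trailing single
    # value acts as the default branch
    grade = when(
        75 >= 90, 'A',
        75 >= 70, 'B',
        'C',
    )
    assert grade == 'B'

    # pipe() chains left-to-right, compose() right-to-left
    assert pipe(str.strip, str.lower)('  Hello ') == 'hello'
    assert compose(str.lower, str.strip)('  Hello ') == 'hello'

    # flatten() folds a list of lists via operator.iconcat
    assert flatten([[1], [2, 3]]) == [1, 2, 3]

    # take() stops early, so huge iterables are fine
    assert take(3, range(10 ** 9)) == [0, 1, 2]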
_techreview-textEditor.py
""" ################################################################################ PyEdit 2.1: a Python/tkinter text file editor and component. Uses the Tk text widget, plus GuiMaker menus and toolbar buttons to implement a full-featured text editor that can be run as a standalone program, and attached as a component to other GUIs. Also used by PyMailGUI and PyView to edit mail text and image file notes, and by PyMailGUI and PyDemos in pop-up mode to display source and text files. New in version 2.1 (4E) -updated to run under Python 3.X (3.1) -added "grep" search menu option and dialog: threaded external files search -verify app exit on quit if changes in other edit windows in process -supports arbitrary Unicode encodings for files: per textConfig.py settings -update change and font dialog implementations to allow many to be open -runs self.update() before setting text in new editor for loadFirst -various improvements to the Run Code option, per the next section 2.1 Run Code improvements: -use base name after chdir to run code file, not possibly relative path -use launch modes that support arguments for run code file mode on Windows -run code inherits launchmodes backslash conversion (no longer required) New in version 2.0 (3E) -added simple font components input dialog -use Tk 8.4 undo stack API to add undo/redo text modifications -now verifies on quit, open, new, run, only if text modified and unsaved -searches are case-insensitive now by default -configuration module for initial font/color/size/searchcase TBD (and suggested exercises): -could also allow search case choice in GUI (not just config file) -could use re patterns for searches (see text chapter) -could experiment with syntax-directed text colorization (see IDLE, others) -could try to verify app exit for quit() in non-managed windows too? -could queue each result as found in grep dialog thread to avoid delay -could use images in toolbar buttons (per examples of this in Chapter 9) -could scan line to map Tk insert position column to account for tabs on Info ################################################################################ """ Version = '2.1' import sys, os # platform, args, run tools from tkinter import * # base widgets, constants from tkinter.filedialog import Open, SaveAs # standard dialogs from tkinter.messagebox import showinfo, showerror, askyesno from tkinter.simpledialog import askstring, askinteger from tkinter.colorchooser import askcolor from PP4E.Gui.Tools.guimaker import * # Frame + menu/toolbar builders # general configurations try: import textConfig # startup font and colors configs = textConfig.__dict__ # work if not on the path or bad except: # define in client app directory configs = {} helptext = """PyEdit version %s April, 2010 (2.0: January, 2006) (1.0: October, 2000) Programming Python, 4th Edition Mark Lutz, for O'Reilly Media, Inc. A text editor program and embeddable object component, written in Python/tkinter. Use menu tear-offs and toolbar for quick access to actions, and Alt-key shortcuts for menus. 
Additions in version %s: - supports Python 3.X - new "grep" external files search dialog - verifies app quit if other edit windows changed - supports arbitrary Unicode encodings for files - allows multiple change and font dialogs - various improvements to the Run Code option Prior version additions: - font pick dialog - unlimited undo/redo - quit/open/new/run prompt save only if changed - searches are case-insensitive - startup configuration module textConfig.py """ START = '1.0' # index of first char: row=1,col=0 SEL_FIRST = SEL + '.first' # map sel tag to index SEL_LAST = SEL + '.last' # same as 'sel.last' FontScale = 0 # use bigger font on Linux if sys.platform[:3] != 'win': # and other non-Windows boxes FontScale = 3 ################################################################################ # Main class: implements editor GUI, actions # requires a flavor of GuiMaker to be mixed in by more specific subclasses; # not a direct subclass of GuiMaker because that class takes multiple forms. ################################################################################ class TextEditor: # mix with menu/toolbar Frame class startfiledir = '.' # for dialogs editwindows = [] # for process-wide quit check # Unicode configurations # imported in class to allow overrides in subclass or self if __name__ == '__main__': from textConfig import ( # my dir is on the path opensAskUser, opensEncoding, savesUseKnownEncoding, savesAskUser, savesEncoding) else: from .textConfig import ( # 2.1: always from this package opensAskUser, opensEncoding, savesUseKnownEncoding, savesAskUser, savesEncoding) ftypes = [('All files', '*'), # for file open dialog ('Text files', '.txt'), # customize in subclass ('Python files', '.py')] # or set in each instance colors = [{'fg':'black', 'bg':'white'}, # color pick list {'fg':'yellow', 'bg':'black'}, # first item is default {'fg':'white', 'bg':'blue'}, # tailor me as desired {'fg':'black', 'bg':'beige'}, # or do PickBg/Fg chooser {'fg':'yellow', 'bg':'purple'}, {'fg':'black', 'bg':'brown'}, {'fg':'lightgreen', 'bg':'darkgreen'}, {'fg':'darkblue', 'bg':'orange'}, {'fg':'orange', 'bg':'darkblue'}] fonts = [('courier', 9+FontScale, 'normal'), # platform-neutral fonts ('courier', 12+FontScale, 'normal'), # (family, size, style) ('courier', 10+FontScale, 'bold'), # or pop up a listbox ('courier', 10+FontScale, 'italic'), # make bigger on Linux ('times', 10+FontScale, 'normal'), # use 'bold italic' for 2 ('helvetica', 10+FontScale, 'normal'), # also 'underline', etc. 
('ariel', 10+FontScale, 'normal'), ('system', 10+FontScale, 'normal'), ('courier', 20+FontScale, 'normal')] def __init__(self, loadFirst='', loadEncode=''): if not isinstance(self, GuiMaker): raise TypeError('TextEditor needs a GuiMaker mixin') self.setFileName(None) self.lastfind = None self.openDialog = None self.saveDialog = None self.knownEncoding = None # 2.1 Unicode: till Open or Save self.text.focus() # else must click in text if loadFirst: self.update() # 2.1: else @ line 2; see book self.onOpen(loadFirst, loadEncode) def start(self): # run by GuiMaker.__init__ self.menuBar = [ # configure menu/toolbar ('File', 0, # a GuiMaker menu def tree [('Open...', 0, self.onOpen), # build in method for self ('Save', 0, self.onSave), # label, shortcut, callback ('Save As...', 5, self.onSaveAs), ('New', 0, self.onNew), 'separator', ('Quit...', 0, self.onQuit)] ), ('Edit', 0, [('Undo', 0, self.onUndo), ('Redo', 0, self.onRedo), 'separator', ('Cut', 0, self.onCut), ('Copy', 1, self.onCopy), ('Paste', 0, self.onPaste), 'separator', ('Delete', 0, self.onDelete), ('Select All', 0, self.onSelectAll)] ), ('Search', 0, [('Goto...', 0, self.onGoto), ('Find...', 0, self.onFind), ('Refind', 0, self.onRefind), ('Change...', 0, self.onChange), ('Grep...', 3, self.onGrep)] ), ('Tools', 0, [('Pick Font...', 6, self.onPickFont), ('Font List', 0, self.onFontList), 'separator', ('Pick Bg...', 3, self.onPickBg), ('Pick Fg...', 0, self.onPickFg), ('Color List', 0, self.onColorList), 'separator', ('Info...', 0, self.onInfo), ('Clone', 1, self.onClone), ('Run Code', 0, self.onRunCode)] )] self.toolBar = [ ('Save', self.onSave, {'side': LEFT}), ('Cut', self.onCut, {'side': LEFT}), ('Copy', self.onCopy, {'side': LEFT}), ('Paste', self.onPaste, {'side': LEFT}), ('Find', self.onRefind, {'side': LEFT}), ('Help', self.help, {'side': RIGHT}), ('Quit', self.onQuit, {'side': RIGHT})] def makeWidgets(self): # run by GuiMaker.__init__ name = Label(self, bg='black', fg='white') # add below menu, above tool name.pack(side=TOP, fill=X) # menu/toolbars are packed # GuiMaker frame packs itself vbar = Scrollbar(self) hbar = Scrollbar(self, orient='horizontal') text = Text(self, padx=5, wrap='none') # disable line wrapping text.config(undo=1, autoseparators=1) # 2.0, default is 0, 1 vbar.pack(side=RIGHT, fill=Y) hbar.pack(side=BOTTOM, fill=X) # pack text last text.pack(side=TOP, fill=BOTH, expand=YES) # else sbars clipped text.config(yscrollcommand=vbar.set) # call vbar.set on text move text.config(xscrollcommand=hbar.set) vbar.config(command=text.yview) # call text.yview on scroll move hbar.config(command=text.xview) # or hbar['command']=text.xview # 2.0: apply user configs or defaults startfont = configs.get('font', self.fonts[0]) startbg = configs.get('bg', self.colors[0]['bg']) startfg = configs.get('fg', self.colors[0]['fg']) text.config(font=startfont, bg=startbg, fg=startfg) if 'height' in configs: text.config(height=configs['height']) if 'width' in configs: text.config(width =configs['width']) self.text = text self.filelabel = name ############################################################################ # File menu commands ############################################################################ def my_askopenfilename(self): # objects remember last result dir/file if not self.openDialog: self.openDialog = Open(initialdir=self.startfiledir, filetypes=self.ftypes) return self.openDialog.show() def my_asksaveasfilename(self): # objects remember last result dir/file if not self.saveDialog: self.saveDialog = 
SaveAs(initialdir=self.startfiledir, filetypes=self.ftypes) return self.saveDialog.show() def onOpen(self, loadFirst='', loadEncode=''): """ 2.1: total rewrite for Unicode support; open in text mode with an encoding passed in, input from the user, in textconfig, or platform default, or open as binary bytes for arbitrary Unicode encodings as last resort and drop \r in Windows end-lines if present so text displays normally; content fetches are returned as str, so need to encode on saves: keep encoding used here; tests if file is okay ahead of time to try to avoid opens; we could also load and manually decode bytes to str to avoid multiple open attempts, but this is unlikely to try all cases; encoding behavior is configurable in the local textConfig.py: 1) tries known type first if passed in by client (email charsets) 2) if opensAskUser True, try user input next (prefill wih defaults) 3) if opensEncoding nonempty, try this encoding next: 'latin-1', etc. 4) tries sys.getdefaultencoding() platform default next 5) uses binary mode bytes and Tk policy as the last resort """ if self.text_edit_modified(): # 2.0 if not askyesno('PyEdit', 'Text has changed: discard changes?'): return file = loadFirst or self.my_askopenfilename() if not file: return if not os.path.isfile(file): showerror('PyEdit', 'Could not open file ' + file) return # try known encoding if passed and accurate (e.g., email) text = None # empty file = '' = False: test for None! if loadEncode: try: text = open(file, 'r', encoding=loadEncode).read() self.knownEncoding = loadEncode except (UnicodeError, LookupError, IOError): # lookup: bad name pass # try user input, prefill with next choice as default if text == None and self.opensAskUser: self.update() # else dialog doesn't appear in rare cases askuser = askstring('PyEdit', 'Enter Unicode encoding for open', initialvalue=(self.opensEncoding or sys.getdefaultencoding() or '')) if askuser: try: text = open(file, 'r', encoding=askuser).read() self.knownEncoding = askuser except (UnicodeError, LookupError, IOError): pass # try config file (or before ask user?) if text == None and self.opensEncoding: try: text = open(file, 'r', encoding=self.opensEncoding).read() self.knownEncoding = self.opensEncoding except (UnicodeError, LookupError, IOError): pass # try platform default (utf-8 on windows; try utf8 always?) 
if text == None: try: text = open(file, 'r', encoding=sys.getdefaultencoding()).read() self.knownEncoding = sys.getdefaultencoding() except (UnicodeError, LookupError, IOError): pass # last resort: use binary bytes and rely on Tk to decode if text == None: try: text = open(file, 'rb').read() # bytes for Unicode text = text.replace(b'\r\n', b'\n') # for display, saves self.knownEncoding = None except IOError: pass if text == None: showerror('PyEdit', 'Could not decode and open file ' + file) else: self.setAllText(text) self.setFileName(file) self.text.edit_reset() # 2.0: clear undo/redo stks self.text.edit_modified(0) # 2.0: clear modified flag def onSave(self): self.onSaveAs(self.currfile) # may be None def onSaveAs(self, forcefile=None): """ 2.1: total rewrite for Unicode support: Text content is always returned as a str, so we must deal with encodings to save to a file here, regardless of open mode of the output file (binary requires bytes, and text must encode); tries the encoding used when opened or saved (if known), user input, config file setting, and platform default last; most users can use platform default; retains successful encoding name here for next save, because this may be the first Save after New or a manual text insertion; Save and SaveAs may both use last known enocding, per config file (it probably should be used for Save, but SaveAs usage is unclear); gui prompts are prefilled with the known encoding if there is one; does manual text.encode() to avoid creating file; text mode files perform platform specific end-line conversion: Windows \r dropped if present on open by text mode (auto) and binary mode (manually); if manual content inserts, must delete \r else duplicates here; knownEncoding=None before first Open or Save, after New, if binary Open; encoding behavior is configurable in the local textConfig.py: 1) if savesUseKnownEncoding > 0, try encoding from last open or save 2) if savesAskUser True, try user input next (prefill with known?) 3) if savesEncoding nonempty, try this encoding next: 'utf-8', etc 4) tries sys.getdefaultencoding() as a last resort """ filename = forcefile or self.my_asksaveasfilename() if not filename: return text = self.getAllText() # 2.1: a str string, with \n eolns, encpick = None # even if read/inserted as bytes # try known encoding at latest Open or Save, if any if self.knownEncoding and ( # enc known? (forcefile and self.savesUseKnownEncoding >= 1) or # on Save? (not forcefile and self.savesUseKnownEncoding >= 2)): # on SaveAs? 
try: text.encode(self.knownEncoding) encpick = self.knownEncoding except UnicodeError: pass # try user input, prefill with known type, else next choice if not encpick and self.savesAskUser: self.update() # else dialog doesn't appear in rare cases askuser = askstring('PyEdit', 'Enter Unicode encoding for save', initialvalue=(self.knownEncoding or self.savesEncoding or sys.getdefaultencoding() or '')) if askuser: try: text.encode(askuser) encpick = askuser except (UnicodeError, LookupError): # LookupError: bad name pass # UnicodeError: can't encode # try config file if not encpick and self.savesEncoding: try: text.encode(self.savesEncoding) encpick = self.savesEncoding except (UnicodeError, LookupError): pass # try platform default (utf8 on windows) if not encpick: try: text.encode(sys.getdefaultencoding()) encpick = sys.getdefaultencoding() except (UnicodeError, LookupError): pass # open in text mode for endlines + encoding if not encpick: showerror('PyEdit', 'Could not encode for file ' + filename) else: try: file = open(filename, 'w', encoding=encpick) file.write(text) file.close() except: showerror('PyEdit', 'Could not write file ' + filename) else: self.setFileName(filename) # may be newly created self.text.edit_modified(0) # 2.0: clear modified flag self.knownEncoding = encpick # 2.1: keep enc for next save # don't clear undo/redo stks! def onNew(self): """ start editing a new file from scratch in current window; see onClone to pop-up a new independent edit window instead; """ if self.text_edit_modified(): # 2.0 if not askyesno('PyEdit', 'Text has changed: discard changes?'): return self.setFileName(None) self.clearAllText() self.text.edit_reset() # 2.0: clear undo/redo stks self.text.edit_modified(0) # 2.0: clear modified flag self.knownEncoding = None # 2.1: Unicode type unknown def onQuit(self): """ on Quit menu/toolbar select and wm border X button in toplevel windows; 2.1: don't exit app if others changed; 2.0: don't ask if self unchanged; moved to the top-level window classes at the end since may vary per usage: a Quit in GUI might quit() to exit, destroy() just one Toplevel, Tk, or edit frame, or not be provided at all when run as an attached component; check self for changes, and if might quit(), main windows should check other windows in the process-wide list to see if they have changed too; """ assert False, 'onQuit must be defined in window-specific sublass' def text_edit_modified(self): """ 2.1: this now works! seems to have been a bool result type issue in tkinter; 2.0: self.text.edit_modified() broken in Python 2.4: do manually for now; """ return self.text.edit_modified() #return self.tk.call((self.text._w, 'edit') + ('modified', None)) ############################################################################ # Edit menu commands ############################################################################ def onUndo(self): # 2.0 try: # tk8.4 keeps undo/redo stacks self.text.edit_undo() # exception if stacks empty except TclError: # menu tear-offs for quick undo showinfo('PyEdit', 'Nothing to undo') def onRedo(self): # 2.0: redo an undone try: self.text.edit_redo() except TclError: showinfo('PyEdit', 'Nothing to redo') def onCopy(self): # get text selected by mouse, etc. 
if not self.text.tag_ranges(SEL): # save in cross-app clipboard showerror('PyEdit', 'No text selected') else: text = self.text.get(SEL_FIRST, SEL_LAST) self.clipboard_clear() self.clipboard_append(text) def onDelete(self): # delete selected text, no save if not self.text.tag_ranges(SEL): showerror('PyEdit', 'No text selected') else: self.text.delete(SEL_FIRST, SEL_LAST) def onCut(self): if not self.text.tag_ranges(SEL): showerror('PyEdit', 'No text selected') else: self.onCopy() # save and delete selected text self.onDelete() def onPaste(self): try: text = self.selection_get(selection='CLIPBOARD') except TclError: showerror('PyEdit', 'Nothing to paste') return self.text.insert(INSERT, text) # add at current insert cursor self.text.tag_remove(SEL, '1.0', END) self.text.tag_add(SEL, INSERT+'-%dc' % len(text), INSERT) self.text.see(INSERT) # select it, so it can be cut def onSelectAll(self): self.text.tag_add(SEL, '1.0', END+'-1c') # select entire text self.text.mark_set(INSERT, '1.0') # move insert point to top self.text.see(INSERT) # scroll to top ############################################################################ # Search menu commands ############################################################################ def onGoto(self, forceline=None): line = forceline or askinteger('PyEdit', 'Enter line number') self.text.update() self.text.focus() if line is not None: maxindex = self.text.index(END+'-1c') maxline = int(maxindex.split('.')[0]) if line > 0 and line <= maxline: self.text.mark_set(INSERT, '%d.0' % line) # goto line self.text.tag_remove(SEL, '1.0', END) # delete selects self.text.tag_add(SEL, INSERT, 'insert + 1l') # select line self.text.see(INSERT) # scroll to line else: showerror('PyEdit', 'Bad line number') def onFind(self, lastkey=None): key = lastkey or askstring('PyEdit', 'Enter search string') self.text.update() self.text.focus() self.lastfind = key if key: # 2.0: nocase nocase = configs.get('caseinsens', True) # 2.0: config where = self.text.search(key, INSERT, END, nocase=nocase) if not where: # don't wrap showerror('PyEdit', 'String not found') else: pastkey = where + '+%dc' % len(key) # index past key self.text.tag_remove(SEL, '1.0', END) # remove any sel self.text.tag_add(SEL, where, pastkey) # select key self.text.mark_set(INSERT, pastkey) # for next find self.text.see(where) # scroll display def onRefind(self): self.onFind(self.lastfind) def onChange(self): """ non-modal find/change dialog 2.1: pass per-dialog inputs to callbacks, may be > 1 change dialog open """ new = Toplevel(self) new.title('PyEdit - change') Label(new, text='Find text?', relief=RIDGE, width=15).grid(row=0, column=0) Label(new, text='Change to?', relief=RIDGE, width=15).grid(row=1, column=0) entry1 = Entry(new) entry2 = Entry(new) entry1.grid(row=0, column=1, sticky=EW) entry2.grid(row=1, column=1, sticky=EW) def onFind(): # use my entry in enclosing scope self.onFind(entry1.get()) # runs normal find dialog callback def onApply(): self.onDoChange(entry1.get(), entry2.get()) Button(new, text='Find', command=onFind ).grid(row=0, column=2, sticky=EW) Button(new, text='Apply', command=onApply).grid(row=1, column=2, sticky=EW) new.columnconfigure(1, weight=1) # expandable entries def onDoChange(self, findtext, changeto): # on Apply in change dialog: change and refind if self.text.tag_ranges(SEL): # must find first self.text.delete(SEL_FIRST, SEL_LAST) self.text.insert(INSERT, changeto) # deletes if empty self.text.see(INSERT) self.onFind(findtext) # goto next appear self.text.update() # force 
refresh def onGrep(self): """ new in version 2.1: threaded external file search; search matched filenames in directory tree for string; listbox clicks open matched file at line of occurrence; search is threaded so the GUI remains active and is not blocked, and to allow multiple greps to overlap in time; could use threadtools, but avoid loop in no active grep; """ from PP4E.Gui.ShellGui.formrows import makeFormRow # nonmodal dialog: get dirnname, filenamepatt, grepkey popup = Toplevel() popup.title('PyEdit - grep') var1 = makeFormRow(popup, label='Directory root', width=18, browse=False) var2 = makeFormRow(popup, label='Filename pattern', width=18, browse=False) var3 = makeFormRow(popup, label='Search string', width=18, browse=False) var1.set('.') # current dir var2.set('*.py') # initial values Button(popup, text='Go', command=lambda: self.onDoGrep(var1.get(), var2.get(), var3.get())).pack() def onDoGrep(self, dirname, filenamepatt, grepkey): # on Go in grep dialog: populate scrolled list with matches # tbd: should producer thread be daemon so dies with app? import threading, queue # make non-modal un-closeable dialog mypopup = Tk() mypopup.title('PyEdit - grepping') status = Label(mypopup, text='Grep thread searching for: %r...' % grepkey) status.pack(padx=20, pady=20) mypopup.protocol('WM_DELETE_WINDOW', lambda: None) # ignore X close # start producer thread, consumer loop myqueue = queue.Queue() threadargs = (filenamepatt, dirname, grepkey, myqueue) threading.Thread(target=self.grepThreadProducer, args=threadargs).start() self.grepThreadConsumer(grepkey, myqueue, mypopup) def grepThreadProducer(self, filenamepatt, dirname, grepkey, myqueue): """ in a non-GUI parallel thread: queue find.find results list; could also queue matches as found, but need to keep window; """ from PP4E.Tools.find import find matches = [] for filepath in find(pattern=filenamepatt, startdir=dirname): try: for (linenum, linestr) in enumerate(open(filepath)): if grepkey in linestr: message = '%s@%d [%s]' % (filepath, linenum + 1, linestr) matches.append(message) except UnicodeDecodeError: print('Unicode error in:', filepath) myqueue.put(matches) def grepThreadConsumer(self, grepkey, myqueue, mypopup): """ in the main GUI thread: watch queue for results or []; there may be multiple active grep threads/loops/queues; there may be other types of threads/checkers in process, especially when PyEdit is attached component (PyMailGUI); """ import queue try: matches = myqueue.get(block=False) except queue.Empty: self.after(250, self.grepThreadConsumer, grepkey, myqueue, mypopup) else: mypopup.destroy() # close status self.update() # erase it now if not matches: showinfo('PyEdit', 'Grep found no matches for: %r' % grepkey) else: self.grepMatchesList(matches, grepkey) def grepMatchesList(self, matches, grepkey): # populate list after successful matches from PP4E.Gui.Tour.scrolledlist import ScrolledList print('Matches for %s: %s' % (grepkey, len(matches))) # catch list double-click class ScrolledFilenames(ScrolledList): def runCommand(self, selection): file, line = selection.split(' [', 1)[0].split('@') editor = TextEditorMainPopup(loadFirst=file, winTitle=' grep match') editor.onGoto(int(line)) editor.text.focus_force() # no, really # new non-modal widnow popup = Tk() popup.title('PyEdit - grep matches: %r' % grepkey) ScrolledFilenames(parent=popup, options=matches) ############################################################################ # Tools menu commands 
############################################################################ def onFontList(self): self.fonts.append(self.fonts[0]) # pick next font in list del self.fonts[0] # resizes the text area self.text.config(font=self.fonts[0]) def onColorList(self): self.colors.append(self.colors[0]) # pick next color in list del self.colors[0] # move current to end self.text.config(fg=self.colors[0]['fg'], bg=self.colors[0]['bg']) def onPickFg(self): self.pickColor('fg') # added on 10/02/00 def onPickBg(self): # select arbitrary color self.pickColor('bg') # in standard color dialog def pickColor(self, part): # this is too easy (triple, hexstr) = askcolor() if hexstr: self.text.config(**{part: hexstr}) def onInfo(self): """ pop-up dialog giving text statistics and cursor location; caveat (2.1): Tk insert position column counts a tab as one character: translate to next multiple of 8 to match visual? """ text = self.getAllText() # added on 5/3/00 in 15 mins bytes = len(text) # words uses a simple guess: lines = len(text.split('\n')) # any separated by whitespace words = len(text.split()) # 3.x: bytes is really chars index = self.text.index(INSERT) # str is unicode code points where = tuple(index.split('.')) showinfo('PyEdit Information', 'Current location:\n\n' + 'line:\t%s\ncolumn:\t%s\n\n' % where + 'File text statistics:\n\n' + 'chars:\t%d\nlines:\t%d\nwords:\t%d\n' % (bytes, lines, words)) def onClone(self, makewindow=True): """ open a new edit window without changing one already open (onNew); inherits quit and other behavior of the window that it clones; 2.1: subclass must redefine/replace this if makes its own popup, else this creates a bogus extra window here which will be empty; """ if not makewindow: new = None # assume class makes its own window else: new = Toplevel() # a new edit window in same process myclass = self.__class__ # instance's (lowest) class object myclass(new) # attach/run instance of my class def onRunCode(self, parallelmode=True): """ run Python code being edited--not an IDE, but handy; tries to run in file's dir, not cwd (may be PP4E root); inputs and adds command-line arguments for script files; code's stdin/out/err = editor's start window, if any: run with a console window to see code's print outputs; but parallelmode uses start to open a DOS box for I/O; module search path will include '.' 
dir where started; in non-file mode, code's Tk root may be PyEdit's window; subprocess or multiprocessing modules may work here too; 2.1: fixed to use base file name after chdir, not path; 2.1: use StartArgs to allow args in file mode on Windows; 2.1: run an update() after 1st dialog else 2nd dialog sometimes does not appear in rare cases; """ def askcmdargs(): return askstring('PyEdit', 'Commandline arguments?') or '' from PP4E.launchmodes import System, Start, StartArgs, Fork filemode = False thefile = str(self.getFileName()) if os.path.exists(thefile): filemode = askyesno('PyEdit', 'Run from file?') self.update() # 2.1: run update() if not filemode: # run text string cmdargs = askcmdargs() namespace = {'__name__': '__main__'} # run as top-level sys.argv = [thefile] + cmdargs.split() # could use threads exec(self.getAllText() + '\n', namespace) # exceptions ignored elif self.text_edit_modified(): # 2.0: changed test showerror('PyEdit', 'Text changed: you must save before run') else: cmdargs = askcmdargs() mycwd = os.getcwd() # cwd may be root dirname, filename = os.path.split(thefile) # get dir, base os.chdir(dirname or mycwd) # cd for filenames thecmd = filename + ' ' + cmdargs # 2.1: not theFile if not parallelmode: # run as file System(thecmd, thecmd)() # block editor else: if sys.platform[:3] == 'win': # spawn in parallel run = StartArgs if cmdargs else Start # 2.1: support args run(thecmd, thecmd)() # or always Spawn else: Fork(thecmd, thecmd)() # spawn in parallel os.chdir(mycwd) # go back to my dir def onPickFont(self): """ 2.0 non-modal font spec dialog 2.1: pass per-dialog inputs to callback, may be > 1 font dialog open """ from PP4E.Gui.ShellGui.formrows import makeFormRow popup = Toplevel(self) popup.title('PyEdit - font') var1 = makeFormRow(popup, label='Family', browse=False) var2 = makeFormRow(popup, label='Size', browse=False) var3 = makeFormRow(popup, label='Style', browse=False) var1.set('courier') var2.set('12') # suggested vals var3.set('bold italic') # see pick list for valid inputs Button(popup, text='Apply', command= lambda: self.onDoFont(var1.get(), var2.get(), var3.get())).pack() def onDoFont(self, family, size, style): try: self.text.config(font=(family, int(size), style)) except: showerror('PyEdit', 'Bad font specification') ############################################################################ # Utilities, useful outside this class ############################################################################ def isEmpty(self): return not self.getAllText() def getAllText(self): return self.text.get('1.0', END+'-1c') # extract text as str string def setAllText(self, text): """ caller: call self.update() first if just packed, else the initial position may be at line 2, not line 1 (2.1; Tk bug?) """ self.text.delete('1.0', END) # store text string in widget self.text.insert(END, text) # or '1.0'; text=bytes or str self.text.mark_set(INSERT, '1.0') # move insert point to top self.text.see(INSERT) # scroll to top, insert set def clearAllText(self): self.text.delete('1.0', END) # clear text in widget def getFileName(self): return self.currfile def setFileName(self, name): # see also: onGoto(linenum) self.currfile = name # for save self.filelabel.config(text=str(name)) def setKnownEncoding(self, encoding='utf-8'): # 2.1: for saves if inserted self.knownEncoding = encoding # else saves use config, ask? 
def setBg(self, color): self.text.config(bg=color) # to set manually from code def setFg(self, color): self.text.config(fg=color) # 'black', hexstring def setFont(self, font): self.text.config(font=font) # ('family', size, 'style') def setHeight(self, lines): # default = 24h x 80w self.text.config(height=lines) # may also be from textCongif.py def setWidth(self, chars): self.text.config(width=chars) def clearModified(self): self.text.edit_modified(0) # clear modified flag def isModified(self): return self.text_edit_modified() # changed since last reset? def help(self): showinfo('About PyEdit', helptext % ((Version,)*2)) ################################################################################ # Ready-to-use editor classes # mixes in a GuiMaker Frame subclass which builds menu and toolbars # # these classes are common use cases, but other configurations are possible; # call TextEditorMain().mainloop() to start PyEdit as a standalone program; # redefine/extend onQuit in a subclass to catch exit or destroy (see PyView); # caveat: could use windows.py for icons, but quit protocol is custom here. ################################################################################ #------------------------------------------------------------------------------- # 2.1: on quit(), don't silently exit entire app if any other changed edit # windows are open in the process - changes would be lost because all other # windows are closed too, including multiple Tk editor parents; uses a list # to keep track of all PyEdit window instances open in process; this may be # too broad (if we destroy() instead of quit(), need only check check children # of parent being destroyed), but better to err on side of being too inclusive; # onQuit moved here because varies per window type and is not present for all; # # assumes a TextEditorMainPopup is never a parent to other editor windows - # Toplevel children are destroyed with their parents; this does not address # closes outside the scope of PyEdit classes here (tkinter quit is available # on every widget, and any widget type may be a Toplevel parent!); client is # responsible for checking for editor content changes in all uncovered cases; # note that tkinter's <Destroy> bind event won't help here, because its callback # cannot run GUI operations such as text change tests and fetches - see the # book and destroyer.py for more details on this event; #------------------------------------------------------------------------------- ################################### # when text editor owns the window ################################### class TextEditorMain(TextEditor, GuiMakerWindowMenu): """ main PyEdit windows that quit() to exit app on a Quit in GUI, and build a menu on a window; parent may be default Tk, explicit Tk, or Toplevel: parent must be a window, and probably should be a Tk so this isn't silently destoyed and closed with a parent; all main PyEdit windows check all other PyEdit windows open in the process for changes on a Quit in the GUI, since a quit() here will exit the entire app; the editor's frame need not occupy entire window (may have other parts: see PyView), but its Quit ends program; onQuit is run for Quit in toolbar or File menu, as well as window border X; """ def __init__(self, parent=None, loadFirst='', loadEncode=''): # editor fills whole parent window GuiMaker.__init__(self, parent) # use main window menus TextEditor.__init__(self, loadFirst, loadEncode) # GuiMaker frame packs self self.master.title('PyEdit ' + Version) # title, wm X if 
standalone self.master.iconname('PyEdit') self.master.protocol('WM_DELETE_WINDOW', self.onQuit) TextEditor.editwindows.append(self) def onQuit(self): # on a Quit request in the GUI close = not self.text_edit_modified() # check self, ask?, check others if not close: close = askyesno('PyEdit', 'Text changed: quit and discard changes?') if close: windows = TextEditor.editwindows changed = [w for w in windows if w != self and w.text_edit_modified()] if not changed: GuiMaker.quit(self) # quit ends entire app regardless of widget type else: numchange = len(changed) verify = '%s other edit window%s changed: quit and discard anyhow?' verify = verify % (numchange, 's' if numchange > 1 else '') if askyesno('PyEdit', verify): GuiMaker.quit(self) class TextEditorMainPopup(TextEditor, GuiMakerWindowMenu): """ popup PyEdit windows that destroy() to close only self on a Quit in GUI, and build a menu on a window; makes own Toplevel parent, which is child to default Tk (for None) or other passed-in window or widget (e.g., a frame); adds to list so will be checked for changes if any PyEdit main window quits; if any PyEdit main windows will be created, parent of this should also be a PyEdit main window's parent so this is not closed silently while being tracked; onQuit is run for Quit in toolbar or File menu, as well as window border X; """ def __init__(self, parent=None, loadFirst='', winTitle='', loadEncode=''): # create own window self.popup = Toplevel(parent) GuiMaker.__init__(self, self.popup) # use main window menus TextEditor.__init__(self, loadFirst, loadEncode) # a frame in a new popup assert self.master == self.popup self.popup.title('PyEdit ' + Version + winTitle) self.popup.iconname('PyEdit') self.popup.protocol('WM_DELETE_WINDOW', self.onQuit) TextEditor.editwindows.append(self) def onQuit(self): close = not self.text_edit_modified() if not close: close = askyesno('PyEdit', 'Text changed: quit and discard changes?') if close: self.popup.destroy() # kill this window only TextEditor.editwindows.remove(self) # (plus any child windows) def onClone(self): TextEditor.onClone(self, makewindow=False) # I make my own pop-up ######################################### # when editor embedded in another window ######################################### class TextEditorComponent(TextEditor, GuiMakerFrameMenu): """ attached PyEdit component frames with full menu/toolbar options, which run a destroy() on a Quit in the GUI to erase self only; a Quit in the GUI verifies if any changes in self (only) here; does not intercept window manager border X: doesn't own window; does not add self to changes tracking list: part of larger app; """ def __init__(self, parent=None, loadFirst='', loadEncode=''): # use Frame-based menus GuiMaker.__init__(self, parent) # all menus, buttons on TextEditor.__init__(self, loadFirst, loadEncode) # GuiMaker must init 1st def onQuit(self): close = not self.text_edit_modified() if not close: close = askyesno('PyEdit', 'Text changed: quit and discard changes?') if close: self.destroy() # erase self Frame but do not quit enclosing app class TextEditorComponentMinimal(TextEditor, GuiMakerFrameMenu): """ attached PyEdit component frames without Quit and File menu options; on startup, removes Quit from toolbar, and either deletes File menu or disables all its items (possibly hackish, but sufficient); menu and toolbar structures are per-instance data: changes do not impact others; Quit in GUI never occurs, because it is removed from available options; """ def __init__(self, parent=None, 
loadFirst='', deleteFile=True, loadEncode=''): self.deleteFile = deleteFile GuiMaker.__init__(self, parent) # GuiMaker frame packs self TextEditor.__init__(self, loadFirst, loadEncode) # TextEditor adds middle def start(self): TextEditor.start(self) # GuiMaker start call for i in range(len(self.toolBar)): # delete quit in toolbar if self.toolBar[i][0] == 'Quit': # delete file menu items, del self.toolBar[i] # or just disable file break if self.deleteFile: for i in range(len(self.menuBar)): if self.menuBar[i][0] == 'File': del self.menuBar[i] break else: for (name, key, items) in self.menuBar: if name == 'File': items.append([1,2,3,4,6]) ################################################################################ # standalone program run ################################################################################ def testPopup(): # see PyView and PyMail for component tests root = Tk() TextEditorMainPopup(root) TextEditorMainPopup(root) Button(root, text='More', command=TextEditorMainPopup).pack(fill=X) Button(root, text='Quit', command=root.quit).pack(fill=X) root.mainloop() def main(): # may be typed or clicked try: # or associated on Windows fname = sys.argv[1] # arg = optional filename except IndexError: # build in default Tk root fname = None TextEditorMain(loadFirst=fname).pack(expand=YES, fill=BOTH) # pack optional mainloop() if __name__ == '__main__': # when run as a script #testPopup() main() # run .pyw for no DOS box
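#-------------------------------------------------------------------------------
# an embedding sketch (not part of the original file): attach PyEdit as a
# component frame inside a larger GUI via the TextEditorComponent class above;
# mirrors the pack protocol used by main() and testPopup(); the host widgets
# here are illustrative only;
#-------------------------------------------------------------------------------

def testComponent():
    root = Tk()
    Label(root, text='host application widgets here').pack(side=TOP, fill=X)
    editor = TextEditorComponent(root)            # full menus/toolbar component
    editor.pack(side=TOP, expand=YES, fill=BOTH)  # Quit destroys this frame only
    root.mainloop()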
binlogCapturer.py
# Copyright (c) 2020-present ly.com, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #!/usr/bin/python #-*- coding:utf8 -*- import threading import Queue import ConfigParser import time import os import sys import signal import multiprocessing from binlogReader import BinlogReader from binlogParser import BinlogParser from binlogProcessor import BinlogProcessor from binlogFilter import BinlogFilter #from binlogSender import BinlogSender #from fileSender import BinlogSender from schemaManager import SchemaManager from senderManager import SenderManager # from server import ServerHandler from BaseHTTPServer import HTTPServer,BaseHTTPRequestHandler class BinlogCapturer: def __init__(self, configPath = './conf/config.cfg'): self.configPath = configPath self.config = ConfigParser.ConfigParser() self.config.read(self.configPath) self.loadBinlogReaderTimestamps() self.initSchemaManager() self.initQueues() self.initWorks() self._stop = False def initSchemaManager(self): self.metaServerList = self.config.get('global','metaServer') self.schemaManager = SchemaManager(self.config.get('global','binlogTableID'), self.metaServerList) self.schemaManager.init() self.schemaManager.setSchemaUpdateFunc(self.tableSchemaUpdateFunc) self.schemaManager.setRegionUpdateFunc(self.regionInfoUpdateFunc) def loadBinlogReaderTimestamps(self): self.binlogCheckPointPath = self.config.get('global','binlogCheckPointPath') lastReadTs = 0 for line in open(self.binlogCheckPointPath): ts = int(line) if ts > lastReadTs: lastReadTs = ts self.binlogReadCheckPoint = lastReadTs self.lastUpdateCheckPoint = time.time() def initQueues(self): self.binlogReaderQueue = multiprocessing.Queue(2000) self.binlogParserQueue = multiprocessing.Queue(2000) self.binlogSenderQueue = multiprocessing.Queue(2000) self.binlogCheckPointQueue = multiprocessing.Queue(2000) def initWorks(self): self.binlogReaderDict = {} self.regionID = self.config.getint('global','binlogRegionID') self.regionInfo = self.schemaManager.getRegionInfoByRegionID(self.regionID) self.binlogReader = BinlogReader(self.regionInfo, self.binlogReaderQueue, self.binlogReadCheckPoint, self.schemaManager) self.binlogParser = BinlogParser(self.configPath, self.binlogReaderQueue, self.binlogParserQueue) self.binlogProcessor = BinlogProcessor(self.schemaManager) self.binlogFilter = BinlogFilter(self.config.get('global','filterRuleDict')) self.binlogSender = SenderManager(self.config.get('global','senderConfig'), self.binlogSenderQueue, self.binlogCheckPointQueue) def senderCallBack(self, ts): self.binlogReadCheckPoint = ts tmpFile = self.binlogCheckPointPath + '.tmp' with open(tmpFile, 'w') as f: f.write(str(ts) + '\n') os.rename(tmpFile, self.binlogCheckPointPath) readerSize = self.binlogReaderQueue.qsize() senderSize = self.binlogSenderQueue.qsize() f = open('queueSize.txt','w') f.write(str(readerSize) + '\t' + str(senderSize)) f.close() def processBinlogsThreadFunc(self): while not self._stop: self.binlogProcess() def binlogProcess(self): while not self._stop: item = 
self.binlogParserQueue.get() self.binlogProcessor.process(item) if self.binlogFilter.filter(item): continue self.binlogSenderQueue.put(item) def startProcessBinlogsThread(self): self.processBinlogThread = threading.Thread(target = self.processBinlogsThreadFunc) self.processBinlogThread.setDaemon(True) self.processBinlogThread.start() def tableSchemaUpdateFunc(self, insertDict, updateDict, deleteDict): #当table schema更新时,此函数被调用 return for tableID, schema in insertDict.items(): self.binlogParser.updateTableSchema(schema) for tableID, schema in updateDict.items(): self.binlogParser.updateTableSchema(schema) def regionInfoUpdateFunc(self, insertDict, updateDict, deleteDict): #当binlog的region变更时,此函数被调用 #若binlog store 切主,需要更新binlogStore的storeClient for rid, regionInfo in updateDict.items(): if rid != self.regionID: continue self.binlogReader.updateRegionInfo(regionInfo) def updateCheckpointThreadFunc(self): while True: checkpoint = self.binlogCheckPointQueue.get() if checkpoint == 0: continue self.binlogReadCheckPoint = checkpoint tmpFile = self.binlogCheckPointPath + '.tmp' with open(tmpFile, 'w') as f: f.write(str(self.binlogReadCheckPoint) + '\n') os.rename(tmpFile, self.binlogCheckPointPath) # readerSize = self.binlogReaderQueue.qsize() # parserSize = self.binlogParserQueue.qsize() # senderSize = self.binlogSenderQueue.qsize() # print "%d\t%d\t%d\t%d" % (readerSize, parserSize, senderSize, self.binlogReadCheckPoint) # f = open('queueSize.txt','w') # f.write(str(readerSize) + '\t' + str(senderSize)) # f.close() def startUpdateCheckpointThread(self): self.updateCheckpointThread = threading.Thread(target = self.updateCheckpointThreadFunc) self.updateCheckpointThread.start() def start(self): self.schemaManager.start() self.binlogReader.start() self.binlogParser.start() self.binlogSender.start() self.httpThread = threading.Thread(target=self.httpFunc) self.httpThread.setDaemon(True) self.httpThread.start() self.startProcessBinlogsThread() self.startUpdateCheckpointThread() self.threadMonitor() def stop(self): self.binlogReader.stop() self.binlogParser.terminate() self.binlogSender.terminate() pid = os.getpid() os.kill(pid, signal.SIGKILL) def threadMonitor(self): self.threadList = [] self.threadList.append(self.processBinlogThread) self.threadList.append(self.binlogReader) while True: if not self.binlogReader.isAlive(): self.stop() if not self.binlogParser.isAlive(): self.stop() if not self.binlogSender.isAlive(): self.stop() time.sleep(1) def httpFunc(self): self.port = int(self.config.get('global','httpPort')) print self.port self.http_server = HTTPServer(('0.0.0.0',self.port),ServerHandler) self.http_server.serve_forever() capturer = BinlogCapturer() class ServerHandler(BaseHTTPRequestHandler): def do_GET(self): print "get" self.send_response(200) self.send_header('Content-type', 'application/json') self.end_headers() wor = '[status]\n\n' wor += 'binlogReaderQueue.size:' + str(capturer.binlogReaderQueue.qsize()) + '\n' wor += 'binlogParserQueue.size:' + str(capturer.binlogParserQueue.qsize()) + '\n' wor += 'binlogSenderQueue.size:' + str(capturer.binlogSenderQueue.qsize()) + '\n' wor += 'binlogReadCheckPoint:' + str(capturer.binlogReadCheckPoint) + '\n' self.wfile.write(wor) def signal_kill_handler(signum, handler): print "kill:",signum capturer.stop() def register_signal_handler(): signal.signal(signal.SIGINT, signal_kill_handler) signal.signal(signal.SIGCHLD, signal_kill_handler) #signal.signal(signal.SIGKILL, signal_kill_handler) if __name__ == '__main__': register_signal_handler() print 
os.getpid() capturer.start()
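# A minimal Python 3 sketch of the atomic checkpoint pattern used by
# senderCallBack and updateCheckpointThreadFunc above: write the new
# timestamp to a temp file, then os.rename() it over the checkpoint file
# so a crash never leaves a half-written value. The path and timestamp
# below are hypothetical.
import os

def save_checkpoint(path, ts):
    tmp = path + '.tmp'
    with open(tmp, 'w') as f:
        f.write(str(ts) + '\n')
    # atomic on POSIX when tmp and path are on the same filesystem
    os.rename(tmp, path)

def load_checkpoint(path):
    # keep the largest timestamp seen, mirroring loadBinlogReaderTimestamps
    last = 0
    with open(path) as f:
        for line in f:
            last = max(last, int(line))
    return last

if __name__ == '__main__':
    save_checkpoint('./binlog.checkpoint', 1500000000)
    assert load_checkpoint('./binlog.checkpoint') == 1500000000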
scripts.py
# -*- coding: utf-8 -*- ''' This module contains the function calls to execute command line scripts ''' # Import python libs from __future__ import absolute_import, print_function import os import sys import time import logging import threading import traceback from random import randint # Import salt libs from salt.exceptions import SaltSystemExit, SaltClientError, SaltReqTimeoutError import salt.defaults.exitcodes # pylint: disable=unused-import log = logging.getLogger(__name__) def _handle_interrupt(exc, original_exc, hardfail=False, trace=''): ''' if hardfailing: If we got the original stacktrace, log it If all cases, raise the original exception but this is logically part the initial stack. else just let salt exit gracefully ''' if hardfail: if trace: log.error(trace) raise original_exc else: raise exc def salt_master(): ''' Start the salt master. ''' import salt.cli.daemons master = salt.cli.daemons.Master() master.start() def minion_process(): ''' Start a minion process ''' import salt.utils import salt.cli.daemons # salt_minion spawns this function in a new process salt.utils.appendproctitle('KeepAlive') def suicide_when_without_parent(parent_pid): ''' Have the minion suicide if the parent process is gone NOTE: small race issue where the parent PID could be replace with another process with same PID! ''' while True: time.sleep(5) try: # check pid alive (Unix only trick!) if os.getuid() == 0 and not salt.utils.is_windows(): os.kill(parent_pid, 0) except OSError as exc: # forcibly exit, regular sys.exit raises an exception-- which # isn't sufficient in a thread log.error('Minion process encountered exception: {0}'.format(exc)) os._exit(salt.defaults.exitcodes.EX_GENERIC) if not salt.utils.is_windows(): thread = threading.Thread(target=suicide_when_without_parent, args=(os.getppid(),)) thread.start() minion = salt.cli.daemons.Minion() try: minion.start() except (SaltClientError, SaltReqTimeoutError, SaltSystemExit) as exc: log.warning('Fatal functionality error caught by minion handler:\n', exc_info=True) log.warning('** Restarting minion **') delay = 60 if minion is not None and hasattr(minion, 'config'): delay = minion.config.get('random_reauth_delay', 60) delay = randint(1, delay) log.info('waiting random_reauth_delay {0}s'.format(delay)) time.sleep(delay) sys.exit(salt.defaults.exitcodes.SALT_KEEPALIVE) def salt_minion(): ''' Start the salt minion in a subprocess. Auto restart minion on error. 
''' import signal import functools import salt.cli.daemons import multiprocessing if '' in sys.path: sys.path.remove('') if salt.utils.is_windows(): minion = salt.cli.daemons.Minion() minion.start() return if '--disable-keepalive' in sys.argv: sys.argv.remove('--disable-keepalive') minion = salt.cli.daemons.Minion() minion.start() return def escalate_signal_to_process(pid, signum, sigframe): # pylint: disable=unused-argument ''' Escalate the signal received to the multiprocessing process that is actually running the minion ''' # escalate signal os.kill(pid, signum) # keep one minion subprocess running prev_sigint_handler = signal.getsignal(signal.SIGINT) prev_sigterm_handler = signal.getsignal(signal.SIGTERM) while True: try: process = multiprocessing.Process(target=minion_process) process.start() signal.signal(signal.SIGTERM, functools.partial(escalate_signal_to_process, process.pid)) signal.signal(signal.SIGINT, functools.partial(escalate_signal_to_process, process.pid)) except Exception: # pylint: disable=broad-except # if multiprocessing does not work minion = salt.cli.daemons.Minion() minion.start() break process.join() # Process exited or was terminated. Since we're going to try to restart # it, we MUST, reset signal handling to the previous handlers signal.signal(signal.SIGINT, prev_sigint_handler) signal.signal(signal.SIGTERM, prev_sigterm_handler) if not process.exitcode == salt.defaults.exitcodes.SALT_KEEPALIVE: sys.exit(process.exitcode) # ontop of the random_reauth_delay already preformed # delay extra to reduce flooding and free resources # NOTE: values are static but should be fine. time.sleep(2 + randint(1, 10)) # need to reset logging because new minion objects # cause extra log handlers to accumulate rlogger = logging.getLogger() for handler in rlogger.handlers: rlogger.removeHandler(handler) logging.basicConfig() def proxy_minion_process(queue): ''' Start a proxy minion process ''' import salt.cli.daemons # salt_minion spawns this function in a new process def suicide_when_without_parent(parent_pid): ''' Have the minion suicide if the parent process is gone NOTE: there is a small race issue where the parent PID could be replace with another process with the same PID! ''' while True: time.sleep(5) try: # check pid alive (Unix only trick!) os.kill(parent_pid, 0) except OSError: # forcibly exit, regular sys.exit raises an exception-- which # isn't sufficient in a thread os._exit(999) if not salt.utils.is_windows(): thread = threading.Thread(target=suicide_when_without_parent, args=(os.getppid(),)) thread.start() restart = False proxyminion = None status = salt.defaults.exitcodes.EX_OK try: proxyminion = salt.cli.daemons.ProxyMinion() proxyminion.start() except (Exception, SaltClientError, SaltReqTimeoutError, SaltSystemExit) as exc: log.error('Proxy Minion failed to start: ', exc_info=True) restart = True # status is superfluous since the process will be restarted status = salt.defaults.exitcodes.SALT_KEEPALIVE except SystemExit as exc: restart = False status = exc.code if restart is True: log.warning('** Restarting proxy minion **') delay = 60 if proxyminion is not None: if hasattr(proxyminion, 'config'): delay = proxyminion.config.get('random_reauth_delay', 60) random_delay = randint(1, delay) log.info('Sleeping random_reauth_delay of {0} seconds'.format(random_delay)) # preform delay after minion resources have been cleaned queue.put(random_delay) else: queue.put(0) sys.exit(status) def salt_proxy_minion(): ''' Start a proxy minion. 
''' import salt.cli.daemons import multiprocessing if '' in sys.path: sys.path.remove('') if salt.utils.is_windows(): proxyminion = salt.cli.daemons.ProxyMinion() proxyminion.start() return if '--disable-keepalive' in sys.argv: sys.argv.remove('--disable-keepalive') proxyminion = salt.cli.daemons.ProxyMinion() proxyminion.start() return # keep one minion subprocess running while True: try: queue = multiprocessing.Queue() except Exception: # This breaks in containers proxyminion = salt.cli.daemons.ProxyMinion() proxyminion.start() return process = multiprocessing.Process(target=proxy_minion_process, args=(queue,)) process.start() try: process.join() try: restart_delay = queue.get(block=False) except Exception: if process.exitcode == 0: # Minion process ended naturally, Ctrl+C or --version break restart_delay = 60 if restart_delay == 0: # Minion process ended naturally, Ctrl+C, --version, etc. sys.exit(process.exitcode) # delay restart to reduce flooding and allow network resources to close time.sleep(restart_delay) except KeyboardInterrupt: break # need to reset logging because new minion objects # cause extra log handlers to accumulate rlogger = logging.getLogger() for handler in rlogger.handlers: rlogger.removeHandler(handler) logging.basicConfig() def salt_syndic(): ''' Start the salt syndic. ''' import salt.cli.daemons pid = os.getpid() try: syndic = salt.cli.daemons.Syndic() syndic.start() except KeyboardInterrupt: os.kill(pid, 15) def salt_key(): ''' Manage the authentication keys with salt-key. ''' import salt.cli.key client = None try: client = salt.cli.key.SaltKey() client.run() except KeyboardInterrupt as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit('\nExiting gracefully on Ctrl-c'), err, hardcrash, trace=trace) except Exception as err: sys.stderr.write("Error: {0}\n".format(err.message)) def salt_cp(): ''' Publish commands to the salt system from the command line on the master. ''' import salt.cli.cp client = None try: client = salt.cli.cp.SaltCPCli() client.run() except KeyboardInterrupt as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit('\nExiting gracefully on Ctrl-c'), err, hardcrash, trace=trace) def salt_call(): ''' Directly call a salt command in the modules, does not require a running salt minion to run. ''' import salt.cli.call if '' in sys.path: sys.path.remove('') client = None try: client = salt.cli.call.SaltCall() client.run() except KeyboardInterrupt as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit('\nExiting gracefully on Ctrl-c'), err, hardcrash, trace=trace) def salt_run(): ''' Execute a salt convenience routine. 
''' import salt.cli.run if '' in sys.path: sys.path.remove('') client = None try: client = salt.cli.run.SaltRun() client.run() except KeyboardInterrupt as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit('\nExiting gracefully on Ctrl-c'), err, hardcrash, trace=trace) def salt_ssh(): ''' Execute the salt-ssh system ''' import salt.cli.ssh if '' in sys.path: sys.path.remove('') client = None try: client = salt.cli.ssh.SaltSSH() client.run() except KeyboardInterrupt as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit('\nExiting gracefully on Ctrl-c'), err, hardcrash, trace=trace) except SaltClientError as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit(err), err, hardcrash, trace=trace) def salt_cloud(): ''' The main function for salt-cloud ''' try: # Late-imports for CLI performance import salt.cloud import salt.cloud.cli has_saltcloud = True except ImportError as e: log.error("Error importing salt cloud {0}".format(e)) # No salt cloud on Windows has_saltcloud = False if '' in sys.path: sys.path.remove('') if not has_saltcloud: print('salt-cloud is not available in this system') sys.exit(salt.defaults.exitcodes.EX_UNAVAILABLE) client = None try: client = salt.cloud.cli.SaltCloud() client.run() except KeyboardInterrupt as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit('\nExiting gracefully on Ctrl-c'), err, hardcrash, trace=trace) def salt_api(): ''' The main function for salt-api ''' import salt.cli.api sapi = salt.cli.api.SaltAPI() # pylint: disable=E1120 sapi.start() def salt_main(): ''' Publish commands to the salt system from the command line on the master. ''' import salt.cli.salt if '' in sys.path: sys.path.remove('') client = None try: client = salt.cli.salt.SaltCMD() client.run() except KeyboardInterrupt as err: trace = traceback.format_exc() try: hardcrash = client.options.hard_crash except (AttributeError, KeyError): hardcrash = False _handle_interrupt( SystemExit('\nExiting gracefully on Ctrl-c'), err, hardcrash, trace=trace) def salt_spm(): ''' The main function for spm, the Salt Package Manager .. versionadded:: 2015.8.0 ''' import salt.cli.spm spm = salt.cli.spm.SPM() # pylint: disable=E1120 spm.run()
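# A standalone sketch of the keepalive loop salt_minion() implements above:
# run the worker in a child process, join it, and restart it only when it
# exits with a dedicated "restart me" code, otherwise propagate the exit
# code. WORKER_RESTART_CODE and worker() are hypothetical stand-ins, not
# Salt APIs.
import multiprocessing
import sys
import time

WORKER_RESTART_CODE = 99

def worker():
    time.sleep(1)
    sys.exit(WORKER_RESTART_CODE)  # ask the supervisor for a restart

def supervise(max_restarts=3):
    for _ in range(max_restarts):
        process = multiprocessing.Process(target=worker)
        process.start()
        process.join()
        if process.exitcode != WORKER_RESTART_CODE:
            # normal exit or crash: stop supervising, as salt_minion() does
            sys.exit(process.exitcode)
        time.sleep(1)  # back off before restarting to avoid tight respawn loops

if __name__ == '__main__':
    supervise()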
test.py
import json import select import time import logging import os import threading from typing import Callable import aicrowd_helper import gym import minerl import abc import numpy as np import coloredlogs coloredlogs.install(logging.DEBUG) # our dependencies import joblib import sys sys.path.append(os.path.abspath(os.path.join(__file__, os.pardir, 'mod'))) from dqn_family import get_agent from env_wrappers import wrap_env GPU = -1 ARCH = 'distributed_dueling' NOISY_NET_SIGMA = 0.5 FINAL_EPSILON = 0.01 FINAL_EXPLORATION_FRAMES = 10 ** 6 LR = 0.0000625 ADAM_EPS = 0.00015 PRIORITIZED = True UPDATE_INTERVAL = 4 REPLAY_CAPACITY = 300000 NUM_STEP_RETURN = 10 AGENT_TYPE = 'CategoricalDoubleDQN' GAMMA = 0.99 REPLAY_START_SIZE = 5000 TARGET_UPDATE_INTERVAL = 10000 CLIP_DELTA = True BATCH_ACCUMULATOR = 'mean' FRAME_SKIP = 4 GRAY_SCALE = False FRAME_STACK = 4 RANDOMIZE_ACTION = NOISY_NET_SIGMA is None EVAL_EPSILON = 0.001 maximum_frames = 8000000 STEPS = maximum_frames // FRAME_SKIP # All the evaluations will be evaluated on MineRLObtainDiamondVectorObf-v0 environment MINERL_GYM_ENV = os.getenv('MINERL_GYM_ENV', 'MineRLObtainDiamondVectorObf-v0') MINERL_MAX_EVALUATION_EPISODES = int(os.getenv('MINERL_MAX_EVALUATION_EPISODES', 5)) # Parallel testing/inference, **you can override** below value based on compute # requirements, etc to save OOM in this phase. EVALUATION_THREAD_COUNT = int(os.getenv('EPISODES_EVALUATION_THREAD_COUNT', 2)) class EpisodeDone(Exception): pass class Episode(gym.Env): """A class for a single episode. """ def __init__(self, env): self.env = env self.action_space = env.action_space self.observation_space = env.observation_space self._done = False def reset(self): if not self._done: return self.env.reset() def step(self, action): s,r,d,i = self.env.step(action) if d: self._done = True raise EpisodeDone() else: return s,r,d,i # DO NOT CHANGE THIS CLASS, THIS IS THE BASE CLASS FOR YOUR AGENT. class MineRLAgentBase(abc.ABC): """ To compete in the competition, you are required to implement a SUBCLASS to this class. YOUR SUBMISSION WILL FAIL IF: * Rename this class * You do not implement a subclass to this class This class enables the evaluator to run your agent in parallel, so you should load your model only once in the 'load_agent' method. """ @abc.abstractmethod def load_agent(self): """ This method is called at the beginning of the evaluation. You should load your model and do any preprocessing here. THIS METHOD IS ONLY CALLED ONCE AT THE BEGINNING OF THE EVALUATION. DO NOT LOAD YOUR MODEL ANYWHERE ELSE. """ raise NotImplementedError() @abc.abstractmethod def run_agent_on_episode(self, single_episode_env : Episode): """This method runs your agent on a SINGLE episode. You should just implement the standard environment interaction loop here: obs = env.reset() while not done: env.step(self.agent.act(obs)) ... NOTE: This method will be called in PARALLEL during evaluation. So, only store state in LOCAL variables. For example, if using an LSTM, don't store the hidden state in the class but as a local variable to the method. Args: env (gym.Env): The env your agent should interact with. 
""" raise NotImplementedError() ####################### # YOUR CODE GOES HERE # ####################### class MineRLRainbowBaselineAgent(MineRLAgentBase): def __init__(self, env): self.env = env def load_agent(self): self.agent = get_agent( n_actions=self.env.action_space.n, arch=ARCH, n_input_channels=self.env.observation_space.shape[0], noisy_net_sigma=NOISY_NET_SIGMA, final_epsilon=FINAL_EPSILON, final_exploration_frames=FINAL_EXPLORATION_FRAMES, explorer_sample_func=self.env.action_space.sample, lr=LR, adam_eps=ADAM_EPS, prioritized=PRIORITIZED, steps=STEPS, update_interval=UPDATE_INTERVAL, replay_capacity=REPLAY_CAPACITY, num_step_return=NUM_STEP_RETURN, agent_type=AGENT_TYPE, gpu=GPU, gamma=GAMMA, replay_start_size=REPLAY_START_SIZE, target_update_interval=TARGET_UPDATE_INTERVAL, clip_delta=CLIP_DELTA, batch_accumulator=BATCH_ACCUMULATOR, ) self.agent.load(os.path.abspath(os.path.join(__file__, os.pardir, 'train'))) def run_agent_on_episode(self, single_episode_env: Episode): with self.agent.eval_mode(): obs = single_episode_env.reset() while True: a = self.agent.act(obs) obs, r, done, info = single_episode_env.step(a) ##################################################################### # IMPORTANT: SET THIS VARIABLE WITH THE AGENT CLASS YOU ARE USING # ###################################################################### AGENT_TO_TEST = MineRLRainbowBaselineAgent # MineRLMatrixAgent, MineRLRandomAgent, YourAgentHere #################### # EVALUATION CODE # #################### def main(): assert MINERL_MAX_EVALUATION_EPISODES > 0 assert EVALUATION_THREAD_COUNT > 0 # Create the parallel envs (sequentially to prevent issues!) kmeans = joblib.load(os.path.abspath(os.path.join(__file__, os.pardir, 'train', 'kmeans.joblib'))) def wrapper(env): return wrap_env( env=env, test=True, monitor=False, outdir=None, frame_skip=FRAME_SKIP, gray_scale=GRAY_SCALE, frame_stack=FRAME_STACK, randomize_action=RANDOMIZE_ACTION, eval_epsilon=EVAL_EPSILON, action_choices=kmeans.cluster_centers_, ) envs = [wrapper(gym.make(MINERL_GYM_ENV)) for _ in range(EVALUATION_THREAD_COUNT)] # envs = [gym.make(MINERL_GYM_ENV) for _ in range(EVALUATION_THREAD_COUNT)] agent = AGENT_TO_TEST(envs[0]) # agent = AGENT_TO_TEST() assert isinstance(agent, MineRLAgentBase) agent.load_agent() episodes_per_thread = [MINERL_MAX_EVALUATION_EPISODES // EVALUATION_THREAD_COUNT for _ in range(EVALUATION_THREAD_COUNT)] episodes_per_thread[-1] += MINERL_MAX_EVALUATION_EPISODES - EVALUATION_THREAD_COUNT *(MINERL_MAX_EVALUATION_EPISODES // EVALUATION_THREAD_COUNT) # A simple funciton to evaluate on episodes! def evaluate(i, env): print("[{}] Starting evaluator.".format(i)) for i in range(episodes_per_thread[i]): try: agent.run_agent_on_episode(Episode(env)) except EpisodeDone: print("[{}] Episode complete".format(i)) pass evaluator_threads = [threading.Thread(target=evaluate, args=(i, envs[i])) for i in range(EVALUATION_THREAD_COUNT)] for thread in evaluator_threads: thread.start() # wait fo the evaluation to finish for thread in evaluator_threads: thread.join() if __name__ == "__main__": main()
heartbeat.py
import sys
import datetime
import json
import time
from multiprocessing import Process

from .repository import repository_for_url, Repository
from .metadata import rfc3339_datetime

DEFAULT_REFRESH_INTERVAL = datetime.timedelta(seconds=10)


class Heartbeat:
    def __init__(
        self,
        experiment_id: str,
        repository_url: str,
        path: str,
        refresh_interval: datetime.timedelta = DEFAULT_REFRESH_INTERVAL,
    ):
        self.experiment_id = experiment_id
        self.repository_url = repository_url
        self.path = path
        self.refresh_interval = refresh_interval
        self.process = self.make_process()

    def start(self):
        self.process.start()

    def ensure_running(self):
        if not self.is_alive():
            self.process = self.make_process()
            self.process.start()

    def kill(self):
        self.process.terminate()

    def is_alive(self):
        return self.process.is_alive()

    def make_process(self) -> Process:
        process = Process(target=self.heartbeat_loop)
        process.daemon = True
        return process

    def heartbeat_loop(self):
        # need to instantiate the repository here since the gcs
        # client doesn't like multiprocessing:
        # https://github.com/googleapis/google-cloud-python/issues/3501
        repository = repository_for_url(self.repository_url)
        while True:
            self.refresh(repository)
            time.sleep(self.refresh_interval.total_seconds())

    def refresh(self, repository: Repository):
        obj = json.dumps(
            {
                "experiment_id": self.experiment_id,
                "last_heartbeat": rfc3339_datetime(datetime.datetime.utcnow()),
            }
        )
        try:
            repository.put(self.path, obj)
        except Exception as e:  # pylint: disable=broad-except
            sys.stderr.write("Failed to save heartbeat: {}\n".format(e))
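# An illustrative usage sketch for the Heartbeat class above, assuming this
# module's imports resolve; the experiment id, repository URL, and object
# path are hypothetical. The caller owns the poll loop and revives the
# daemon process with ensure_running() if it died.
def run_with_heartbeat():
    hb = Heartbeat(
        experiment_id="exp-123",
        repository_url="file:///tmp/experiments",
        path="metadata/heartbeats/exp-123.json",
    )
    hb.start()
    try:
        while True:
            hb.ensure_running()  # restart the heartbeat process if it crashed
            time.sleep(30)
    finally:
        hb.kill()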
1507834892-1to7-Host.py
import sys import time import threading import traceback import pythonsv_icx_handler as itp_sv from MiddleWare import lib_wmi_handler from MiddleWare import lib_flash_server as lfs from MiddleWare import lib_power_action_soundwave as lpa from MiddleWare.lib_bios_config import BiosMenuConfig from SoftwareAbstractionLayer import utils from SoftwareAbstractionLayer import library from SoftwareAbstractionLayer import lib_constants # 1507834892 [Pre-Si & Post-Si] Verify GV is disabled when TME is enable # rev.18 # Constants Definition TEST_CASE_ID = "1507834892" SCRIPT_ID = "1507834892-1to7-Host.py" IS_CASE_PASS = True STEP_NO = 1 FAIL_COLLECT = [] # Variants Definition opt_wait_time = 5 os_boot_timeout = 120 boot_wait_timeout = 600 f2_timeout = 20 sut_host = utils.ReadConfig('SUT_IP', 'target_sut_ip') usb_drive_label = utils.ReadConfig('USB Drive', 'DRIVE_LETTER') usb_drive_alias = utils.ReadConfig('USB Drive', 'EFI_ALIAS') ifwi_release = utils.ReadConfig('IFWI_IMAGES', 'RELEASE') mr_2_notme = int(utils.ReadConfig('1507834892', 'MAX_RATIO_2_NO_TME'), 16) gr_2_notme = int(utils.ReadConfig('1507834892', 'GUARANTEED_RATIO_2_NO_TME'), 16) mer_2_notme = int(utils.ReadConfig('1507834892', 'MAX_EFFICIENCY_RATIO_2_NO_TME'), 16) mr_2_tme = int(utils.ReadConfig('1507834892', 'MAX_RATIO_2_TME'), 16) gr_2_tme = int(utils.ReadConfig('1507834892', 'GUARANTEED_RATIO_2_TME'), 16) mer_2_tme = int(utils.ReadConfig('1507834892', 'MAX_EFFICIENCY_RATIO_2_TME'), 16) soundwave_port = utils.ReadConfig('SOUNDWAVE', 'PORT') wh = lib_wmi_handler.WmiHandler() bios_conf = BiosMenuConfig(TEST_CASE_ID, SCRIPT_ID) # Test Case Steps Abstraction def result_process(result, step_string, test_exit=False, is_step_complete=True): global STEP_NO if not result: global IS_CASE_PASS IS_CASE_PASS = False if is_step_complete: print('#' * 160) library.write_log(lib_constants.LOG_FAIL, "Step %d: Failed to %s" % (STEP_NO, step_string), TEST_CASE_ID, SCRIPT_ID) print('#' * 160) global FAIL_COLLECT FAIL_COLLECT.append((STEP_NO, step_string)) STEP_NO += 1 else: print('#' * 160) library.write_log(lib_constants.LOG_FAIL, "Failed to %s" % step_string, TEST_CASE_ID, SCRIPT_ID) print('#' * 160) if test_exit: sys.exit(lib_constants.EXIT_FAILURE) else: if is_step_complete: print('#' * 160) library.write_log(lib_constants.LOG_INFO, "Step %d: Succeed to %s" % (STEP_NO, step_string), TEST_CASE_ID, SCRIPT_ID) print('#' * 160) STEP_NO += 1 else: print('#' * 160) library.write_log(lib_constants.LOG_INFO, "Succeed to %s" % step_string, TEST_CASE_ID, SCRIPT_ID) print('#' * 160) def log_write(result, info): if result == "PASS": library.write_log(lib_constants.LOG_PASS, 'Passed: %s' % (info), TEST_CASE_ID, SCRIPT_ID) sys.exit(lib_constants.EXIT_SUCCESS) elif result == "FAIL": library.write_log(lib_constants.LOG_FAIL, 'Failed: %s' % (info), TEST_CASE_ID, SCRIPT_ID) sys.exit(lib_constants.EXIT_FAILURE) elif result == "INFO": library.write_log(lib_constants.LOG_INFO, 'Status: %s' % (info), TEST_CASE_ID, SCRIPT_ID) return True elif result == "DEBUG": library.write_log(lib_constants.LOG_DEBUG, 'Debug: %s' % (info), TEST_CASE_ID, SCRIPT_ID) return True elif result == "WARNING": library.write_log(lib_constants.LOG_WARNING, 'Warning: %s' % (info), TEST_CASE_ID, SCRIPT_ID) return True else: library.write_log(lib_constants.LOG_ERROR, 'Error: %s' % (info), TEST_CASE_ID, SCRIPT_ID) return False def is_boot_state(): try: result = wh.wmi_os_opt(local=False, os_instruct="name") if "Windows" in result[0]: return "windows" else: return "na" except Exception: 
bios_conf.bios_control_key_press('ESC', 2, 3) is_efi = bios_conf.efi_shell_cmd("") if "Shell>" in is_efi: return "efi" elif "\\>" in is_efi: return "efi_fs" else: bios_conf.bios_control_key_press('ESC', 2, 2) result = bios_conf.bios_back_home() if result: return "bios" else: return "unknown" def tear_down(): sut_state = is_boot_state() if sut_state == "windows": wh.wmi_os_opt(local=False, os_instruct="shutdown") log_write("INFO", "Tear Down: SUT is under %s state, perform G3" % sut_state) lpa.ac_off(soundwave_port) time.sleep(5) def bios_init_opr(): sut_state = is_boot_state() log_write('INFO', 'SUT is under %s state' % sut_state) if sut_state == 'bios': return True elif "efi" in sut_state: bios_conf.bios_control_key_press('CTRL_ALT_DELETE') enter_bios = bios_conf.enter_bios(boot_wait_timeout, f2_timeout) return enter_bios elif sut_state == 'windows': try: wh.wmi_os_opt(local=False, os_instruct="reboot") enter_bios = bios_conf.enter_bios(boot_wait_timeout, f2_timeout) return enter_bios except Exception: return False else: lpa.ac_off(soundwave_port) time.sleep(5) lpa.ac_on(soundwave_port) enter_bios = bios_conf.enter_bios(boot_wait_timeout, f2_timeout) return enter_bios def reset_button(): try: lpa.ac_off(soundwave_port) time.sleep(5) lpa.ac_on(soundwave_port) time.sleep(os_boot_timeout) return True except Exception: return False def os_boot_check(round=1): for i in range(round): try: time.sleep(os_boot_timeout) result = wh.wmi_os_opt(local=False, os_instruct="name") log_write("INFO", "OS boot successfully.") return True except Exception: result = reset_button() if result: log_write("INFO", "OS reset triggered cycle %s" % i) return False def test_flash_ifwi(image_for_flash, port='COM101', step_string="Flash the latest BIOS and boot to setup menu", complete=True): os_state = is_boot_state() if os_state == "windows": wh.wmi_os_opt(local=False, os_instruct="shutdown") try: lfs.flashifwi_em100(binfile=image_for_flash, soundwave_port=port) lpa.ac_on(port) time.sleep(20) log_write('INFO', "IFWI flashed successfully with: %s" % image_for_flash) except Exception: result_process(False, step_string, test_exit=True, is_step_complete=complete) enter_bios = bios_conf.enter_bios(boot_wait_timeout, f2_timeout) result_process(enter_bios, step_string, test_exit=True, is_step_complete=complete) def test_boot_to_setup(step_string="Boot to BIOS Menu", complete=True): bios_boot = bios_init_opr() result_process(bios_boot, step_string, test_exit=True, is_step_complete=complete) def itp_ctrl(status="open"): if status == "open": itp_sv.pythonsv_init() return itp, sv elif status == "close": itp_sv.pythonsv_exit() return True else: return False def test_cpuid(id=0x7, idx=0, target="ecx", step_string="reading CPUID: ", complete=False): try: result = cpuid(id, idx) log_write("INFO", "cpuid %s, %s is %s" % (id, idx, result)) target_val = result[target] log_write("INFO", "%s result is %s" % (target, target_val)) result_process(True, "%s %s" % (step_string, id), test_exit=True, is_step_complete=complete) return target_val except Exception: result_process(False, "%s %s" % (step_string, id), test_exit=True, is_step_complete=complete) def test_get_gv_state(initial_step=False): itp, sv = itp_sv.pythonsv_init() result_process(True, "Initialize ITP environment.", test_exit=True, is_step_complete=initial_step) itp.unlock() itp.forcereconfig() sv.refresh() tme_active = sv.socket0.uncore.memss.mc0.ch0.tme.tme_activate.show() print(tme_active) max_ratio = sv.socket0.pcudata.global_max_ratio_2 grtee_ratio = 
sv.socket0.pcudata.global_guaranteed_ratio_2 effect_ratio = sv.socket0.pcudata.global_max_efficiency_ratio_2 itp_sv.pythonsv_exit() return max_ratio, grtee_ratio, effect_ratio def test_itp_msr(id=0x982, idx=0, step_string="reading itp.threads.msr MSR: ", complete=False): try: result = itp.threads[idx].msr(id) return result except Exception: result_process(False, "%s %s" % (step_string, id), test_exit=True, is_step_complete=complete) def test_msr(id=0x35, step_string="reading MSR: ", complete=False): try: result = msr(id) return result except Exception: result_process(False, "%s %s" % (step_string, id), test_exit=True, is_step_complete=complete) def test_bios_reset(flag=True, step_string="Save, reset, boot to BIOS", complete=True): boot_state = is_boot_state() if boot_state == 'bios': result = bios_conf.reset_to_bios(to_save=flag, wait_timeout=boot_wait_timeout, f2_press_wait=f2_timeout) result_process(result, step_string, test_exit=True, is_step_complete=complete) else: result_process(False, "%s: SUT is under %s" % (step_string, boot_state), test_exit=True, is_step_complete=complete) def test_max_mktme_keys_get(verdict="0x3f", step_string="EDKII -> Socket Configuration -> Processor Configuration -> Max MKTME keys: ", complete=True): boot_state = is_boot_state() if boot_state == 'bios': bios_conf.bios_menu_navi(["EDKII Menu", "Socket Configuration", "Processor Configuration"], wait_time=opt_wait_time) result = bios_conf.get_system_information("Max MKTME Keys") result_process(verdict in result, "%s %s" % (step_string, result), test_exit=True, is_step_complete=complete) else: result_process(False, "%s: SUT is under %s" % (step_string, boot_state), test_exit=True, is_step_complete=complete) def test_aesni_set(value="Enable", step_string="EDKII -> Socket Configuration -> Processor Configuration -> AES-NI: ", complete=True): boot_state = is_boot_state() if boot_state == 'bios': bios_conf.bios_menu_navi(["EDKII Menu", "Socket Configuration", "Processor Configuration"], wait_time=opt_wait_time) result = bios_conf.bios_opt_drop_down_menu_select('AES-NI', value) bios_conf.bios_save_changes() result_process(result, "%s %s" % (step_string, value), test_exit=True, is_step_complete=complete) else: result_process(False, "%s: SUT is under %s" % (step_string, boot_state), test_exit=True, is_step_complete=complete) def test_tme_set(value="Enable", step_string="EDKII -> Socket Configuration -> Processor Configuration -> Total Memory Encryption (TME): ", complete=True): boot_state = is_boot_state() if boot_state == 'bios': bios_conf.bios_menu_navi(["EDKII Menu", "Socket Configuration", "Processor Configuration"], wait_time=opt_wait_time) result = bios_conf.bios_opt_drop_down_menu_select('Total Memory Encryption (TME)', value) bios_conf.bios_save_changes() result_process(result, "%s %s" % (step_string, value), test_exit=True, is_step_complete=complete) else: result_process(False, "%s: SUT is under %s" % (step_string, boot_state), test_exit=True, is_step_complete=complete) def test_mktme_set(value="Enable", step_string="EDKII -> Socket Configuration -> Processor Configuration -> Multi-Key Total Memory Encryption (MK-TME): ", complete=True): boot_state = is_boot_state() if boot_state == 'bios': bios_conf.bios_menu_navi(["EDKII Menu", "Socket Configuration", "Processor Configuration"], wait_time=opt_wait_time) result = bios_conf.bios_opt_drop_down_menu_select('Multikey Total Memory Encryption (MK-TME)', value) bios_conf.bios_save_changes() bios_conf.bios_back_home() result_process(result, "%s %s" % (step_string, 
value), test_exit=True, is_step_complete=complete) else: result_process(False, "%s: SUT is under %s" % (step_string, boot_state), test_exit=True, is_step_complete=complete) def callback_logging(): result_process(False, "Test case execution terminated due to timeout occurred", test_exit=True, is_step_complete=False) def time_out(interval, callback=None): def decorator(func): def wrapper(*args, **kwargs): t = threading.Thread(target=func, args=args, kwargs=kwargs) t.setDaemon(True) t.start() t.join(interval) if t.is_alive() and callback: return threading.Timer(0, callback).start() else: return return wrapper return decorator @time_out(3600, callback_logging) # Test Case Execution def test_execution(): # Test Run Start test_flash_ifwi(ifwi_release, complete=False) test_boot_to_setup(step_string="Flash the latest BIOS and boot to setup menu", complete=True) a, b, c = test_get_gv_state(initial_step=True) print(a, b, c) result = [a == mr_2_notme, b == gr_2_notme, c == mer_2_notme] result_process(False not in result, "Check GV status via PythonSV before TME set", test_exit=False, is_step_complete=True) test_tme_set() test_bios_reset(complete=True) a, b, c = test_get_gv_state(initial_step=True) print(a, b, c) result = [a == mr_2_tme, b == gr_2_tme, c == mer_2_tme] result_process(False not in result, "Check GV status via PythonSV after TME set", test_exit=False, is_step_complete=True) if __name__ == "__main__": try: test_execution() except Exception: result_process(False, "Exception Occurred: \r\n %s" % (traceback.format_exc()), test_exit=True, is_step_complete=True) finally: tear_down() log_write('INFO', "%s steps executed with result verdict %s" % (STEP_NO - 1, IS_CASE_PASS)) if len(FAIL_COLLECT) > 0: for i in FAIL_COLLECT: print("Failed Step(s): %s" % str(i)) if IS_CASE_PASS: log_write('PASS', "Test Case %s Execution Finished" % TEST_CASE_ID) else: log_write('FAIL', "Test Case %s Execution Finished" % TEST_CASE_ID)
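# A standalone sketch of the time_out decorator pattern used above: run the
# wrapped function in a daemon thread, join with a deadline, and fire the
# callback if the thread is still alive when the deadline passes. Note the
# worker thread is abandoned rather than stopped; it dies with the process.
import threading
import time

def time_out(interval, callback=None):
    def decorator(func):
        def wrapper(*args, **kwargs):
            t = threading.Thread(target=func, args=args, kwargs=kwargs)
            t.daemon = True
            t.start()
            t.join(interval)
            if t.is_alive() and callback:
                callback()
        return wrapper
    return decorator

@time_out(0.5, callback=lambda: print("timed out"))
def slow():
    time.sleep(2)

slow()  # prints "timed out" after roughly 0.5 s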
test_signal.py
import os import random import signal import socket import statistics import subprocess import sys import threading import time import unittest from test import support from test.support.script_helper import assert_python_ok, spawn_python try: import _testcapi except ImportError: _testcapi = None class GenericTests(unittest.TestCase): def test_enums(self): for name in dir(signal): sig = getattr(signal, name) if name in {'SIG_DFL', 'SIG_IGN'}: self.assertIsInstance(sig, signal.Handlers) elif name in {'SIG_BLOCK', 'SIG_UNBLOCK', 'SIG_SETMASK'}: self.assertIsInstance(sig, signal.Sigmasks) elif name.startswith('SIG') and not name.startswith('SIG_'): self.assertIsInstance(sig, signal.Signals) elif name.startswith('CTRL_'): self.assertIsInstance(sig, signal.Signals) self.assertEqual(sys.platform, "win32") @unittest.skipIf(sys.platform == "win32", "Not valid on Windows") class PosixTests(unittest.TestCase): def trivial_signal_handler(self, *args): pass def test_out_of_range_signal_number_raises_error(self): self.assertRaises(ValueError, signal.getsignal, 4242) self.assertRaises(ValueError, signal.signal, 4242, self.trivial_signal_handler) def test_setting_signal_handler_to_none_raises_error(self): self.assertRaises(TypeError, signal.signal, signal.SIGUSR1, None) def test_getsignal(self): hup = signal.signal(signal.SIGHUP, self.trivial_signal_handler) self.assertIsInstance(hup, signal.Handlers) self.assertEqual(signal.getsignal(signal.SIGHUP), self.trivial_signal_handler) signal.signal(signal.SIGHUP, hup) self.assertEqual(signal.getsignal(signal.SIGHUP), hup) # Issue 3864, unknown if this affects earlier versions of freebsd also @unittest.skipIf(sys.platform=='freebsd6', 'inter process signals not reliable (do not mix well with threading) ' 'on freebsd6') def test_interprocess_signal(self): dirname = os.path.dirname(__file__) script = os.path.join(dirname, 'signalinterproctester.py') assert_python_ok(script) @unittest.skipUnless(sys.platform == "win32", "Windows specific") class WindowsSignalTests(unittest.TestCase): def test_issue9324(self): # Updated for issue #10003, adding SIGBREAK handler = lambda x, y: None checked = set() for sig in (signal.SIGABRT, signal.SIGBREAK, signal.SIGFPE, signal.SIGILL, signal.SIGINT, signal.SIGSEGV, signal.SIGTERM): # Set and then reset a handler for signals that work on windows. # Issue #18396, only for signals without a C-level handler. 
if signal.getsignal(sig) is not None: signal.signal(sig, signal.signal(sig, handler)) checked.add(sig) # Issue #18396: Ensure the above loop at least tested *something* self.assertTrue(checked) with self.assertRaises(ValueError): signal.signal(-1, handler) with self.assertRaises(ValueError): signal.signal(7, handler) class WakeupFDTests(unittest.TestCase): def test_invalid_fd(self): fd = support.make_bad_fd() self.assertRaises((ValueError, OSError), signal.set_wakeup_fd, fd) def test_invalid_socket(self): sock = socket.socket() fd = sock.fileno() sock.close() self.assertRaises((ValueError, OSError), signal.set_wakeup_fd, fd) def test_set_wakeup_fd_result(self): r1, w1 = os.pipe() self.addCleanup(os.close, r1) self.addCleanup(os.close, w1) r2, w2 = os.pipe() self.addCleanup(os.close, r2) self.addCleanup(os.close, w2) if hasattr(os, 'set_blocking'): os.set_blocking(w1, False) os.set_blocking(w2, False) signal.set_wakeup_fd(w1) self.assertEqual(signal.set_wakeup_fd(w2), w1) self.assertEqual(signal.set_wakeup_fd(-1), w2) self.assertEqual(signal.set_wakeup_fd(-1), -1) def test_set_wakeup_fd_socket_result(self): sock1 = socket.socket() self.addCleanup(sock1.close) sock1.setblocking(False) fd1 = sock1.fileno() sock2 = socket.socket() self.addCleanup(sock2.close) sock2.setblocking(False) fd2 = sock2.fileno() signal.set_wakeup_fd(fd1) self.assertEqual(signal.set_wakeup_fd(fd2), fd1) self.assertEqual(signal.set_wakeup_fd(-1), fd2) self.assertEqual(signal.set_wakeup_fd(-1), -1) # On Windows, files are always blocking and Windows does not provide a # function to test if a socket is in non-blocking mode. @unittest.skipIf(sys.platform == "win32", "tests specific to POSIX") def test_set_wakeup_fd_blocking(self): rfd, wfd = os.pipe() self.addCleanup(os.close, rfd) self.addCleanup(os.close, wfd) # fd must be non-blocking os.set_blocking(wfd, True) with self.assertRaises(ValueError) as cm: signal.set_wakeup_fd(wfd) self.assertEqual(str(cm.exception), "the fd %s must be in non-blocking mode" % wfd) # non-blocking is ok os.set_blocking(wfd, False) signal.set_wakeup_fd(wfd) signal.set_wakeup_fd(-1) @unittest.skipIf(sys.platform == "win32", "Not valid on Windows") class WakeupSignalTests(unittest.TestCase): @unittest.skipIf(_testcapi is None, 'need _testcapi') def check_wakeup(self, test_body, *signals, ordered=True): # use a subprocess to have only one thread code = """if 1: import _testcapi import os import signal import struct signals = {!r} def handler(signum, frame): pass def check_signum(signals): data = os.read(read, len(signals)+1) raised = struct.unpack('%uB' % len(data), data) if not {!r}: raised = set(raised) signals = set(signals) if raised != signals: raise Exception("%r != %r" % (raised, signals)) {} signal.signal(signal.SIGALRM, handler) read, write = os.pipe() os.set_blocking(write, False) signal.set_wakeup_fd(write) test() check_signum(signals) os.close(read) os.close(write) """.format(tuple(map(int, signals)), ordered, test_body) assert_python_ok('-c', code) @unittest.skipIf(_testcapi is None, 'need _testcapi') def test_wakeup_write_error(self): # Issue #16105: write() errors in the C signal handler should not # pass silently. # Use a subprocess to have only one thread. 
code = """if 1: import _testcapi import errno import os import signal import sys from test.support import captured_stderr def handler(signum, frame): 1/0 signal.signal(signal.SIGALRM, handler) r, w = os.pipe() os.set_blocking(r, False) # Set wakeup_fd a read-only file descriptor to trigger the error signal.set_wakeup_fd(r) try: with captured_stderr() as err: _testcapi.raise_signal(signal.SIGALRM) except ZeroDivisionError: # An ignored exception should have been printed out on stderr err = err.getvalue() if ('Exception ignored when trying to write to the signal wakeup fd' not in err): raise AssertionError(err) if ('OSError: [Errno %d]' % errno.EBADF) not in err: raise AssertionError(err) else: raise AssertionError("ZeroDivisionError not raised") os.close(r) os.close(w) """ r, w = os.pipe() try: os.write(r, b'x') except OSError: pass else: self.skipTest("OS doesn't report write() error on the read end of a pipe") finally: os.close(r) os.close(w) assert_python_ok('-c', code) def test_wakeup_fd_early(self): self.check_wakeup("""def test(): import select import time TIMEOUT_FULL = 10 TIMEOUT_HALF = 5 class InterruptSelect(Exception): pass def handler(signum, frame): raise InterruptSelect signal.signal(signal.SIGALRM, handler) signal.alarm(1) # We attempt to get a signal during the sleep, # before select is called try: select.select([], [], [], TIMEOUT_FULL) except InterruptSelect: pass else: raise Exception("select() was not interrupted") before_time = time.monotonic() select.select([read], [], [], TIMEOUT_FULL) after_time = time.monotonic() dt = after_time - before_time if dt >= TIMEOUT_HALF: raise Exception("%s >= %s" % (dt, TIMEOUT_HALF)) """, signal.SIGALRM) def test_wakeup_fd_during(self): self.check_wakeup("""def test(): import select import time TIMEOUT_FULL = 10 TIMEOUT_HALF = 5 class InterruptSelect(Exception): pass def handler(signum, frame): raise InterruptSelect signal.signal(signal.SIGALRM, handler) signal.alarm(1) before_time = time.monotonic() # We attempt to get a signal during the select call try: select.select([read], [], [], TIMEOUT_FULL) except InterruptSelect: pass else: raise Exception("select() was not interrupted") after_time = time.monotonic() dt = after_time - before_time if dt >= TIMEOUT_HALF: raise Exception("%s >= %s" % (dt, TIMEOUT_HALF)) """, signal.SIGALRM) def test_signum(self): self.check_wakeup("""def test(): import _testcapi signal.signal(signal.SIGUSR1, handler) _testcapi.raise_signal(signal.SIGUSR1) _testcapi.raise_signal(signal.SIGALRM) """, signal.SIGUSR1, signal.SIGALRM) @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'), 'need signal.pthread_sigmask()') def test_pending(self): self.check_wakeup("""def test(): signum1 = signal.SIGUSR1 signum2 = signal.SIGUSR2 signal.signal(signum1, handler) signal.signal(signum2, handler) signal.pthread_sigmask(signal.SIG_BLOCK, (signum1, signum2)) _testcapi.raise_signal(signum1) _testcapi.raise_signal(signum2) # Unblocking the 2 signals calls the C signal handler twice signal.pthread_sigmask(signal.SIG_UNBLOCK, (signum1, signum2)) """, signal.SIGUSR1, signal.SIGUSR2, ordered=False) @unittest.skipUnless(hasattr(socket, 'socketpair'), 'need socket.socketpair') class WakeupSocketSignalTests(unittest.TestCase): @unittest.skipIf(_testcapi is None, 'need _testcapi') def test_socket(self): # use a subprocess to have only one thread code = """if 1: import signal import socket import struct import _testcapi signum = signal.SIGINT signals = (signum,) def handler(signum, frame): pass signal.signal(signum, handler) read, write 
= socket.socketpair() read.setblocking(False) write.setblocking(False) signal.set_wakeup_fd(write.fileno()) _testcapi.raise_signal(signum) data = read.recv(1) if not data: raise Exception("no signum written") raised = struct.unpack('B', data) if raised != signals: raise Exception("%r != %r" % (raised, signals)) read.close() write.close() """ assert_python_ok('-c', code) @unittest.skipIf(_testcapi is None, 'need _testcapi') def test_send_error(self): # Use a subprocess to have only one thread. if os.name == 'nt': action = 'send' else: action = 'write' code = """if 1: import errno import signal import socket import sys import time import _testcapi from test.support import captured_stderr signum = signal.SIGINT def handler(signum, frame): pass signal.signal(signum, handler) read, write = socket.socketpair() read.setblocking(False) write.setblocking(False) signal.set_wakeup_fd(write.fileno()) # Close sockets: send() will fail read.close() write.close() with captured_stderr() as err: _testcapi.raise_signal(signum) err = err.getvalue() if ('Exception ignored when trying to {action} to the signal wakeup fd' not in err): raise AssertionError(err) """.format(action=action) assert_python_ok('-c', code) @unittest.skipIf(sys.platform == "win32", "Not valid on Windows") class SiginterruptTest(unittest.TestCase): def readpipe_interrupted(self, interrupt): """Perform a read during which a signal will arrive. Return True if the read is interrupted by the signal and raises an exception. Return False if it returns normally. """ # use a subprocess to have only one thread, to have a timeout on the # blocking read and to not touch signal handling in this process code = """if 1: import errno import os import signal import sys interrupt = %r r, w = os.pipe() def handler(signum, frame): 1 / 0 signal.signal(signal.SIGALRM, handler) if interrupt is not None: signal.siginterrupt(signal.SIGALRM, interrupt) print("ready") sys.stdout.flush() # run the test twice try: for loop in range(2): # send a SIGALRM in a second (during the read) signal.alarm(1) try: # blocking call: read from a pipe without data os.read(r, 1) except ZeroDivisionError: pass else: sys.exit(2) sys.exit(3) finally: os.close(r) os.close(w) """ % (interrupt,) with spawn_python('-c', code) as process: try: # wait until the child process is loaded and has started first_line = process.stdout.readline() stdout, stderr = process.communicate(timeout=5.0) except subprocess.TimeoutExpired: process.kill() return False else: stdout = first_line + stdout exitcode = process.wait() if exitcode not in (2, 3): raise Exception("Child error (exit code %s): %r" % (exitcode, stdout)) return (exitcode == 3) def test_without_siginterrupt(self): # If a signal handler is installed and siginterrupt is not called # at all, when that signal arrives, it interrupts a syscall that's in # progress. interrupted = self.readpipe_interrupted(None) self.assertTrue(interrupted) def test_siginterrupt_on(self): # If a signal handler is installed and siginterrupt is called with # a true value for the second argument, when that signal arrives, it # interrupts a syscall that's in progress. interrupted = self.readpipe_interrupted(True) self.assertTrue(interrupted) def test_siginterrupt_off(self): # If a signal handler is installed and siginterrupt is called with # a false value for the second argument, when that signal arrives, it # does not interrupt a syscall that's in progress. 
interrupted = self.readpipe_interrupted(False) self.assertFalse(interrupted) @unittest.skipIf(sys.platform == "win32", "Not valid on Windows") class ItimerTest(unittest.TestCase): def setUp(self): self.hndl_called = False self.hndl_count = 0 self.itimer = None self.old_alarm = signal.signal(signal.SIGALRM, self.sig_alrm) def tearDown(self): signal.signal(signal.SIGALRM, self.old_alarm) if self.itimer is not None: # test_itimer_exc doesn't change this attr # just ensure that itimer is stopped signal.setitimer(self.itimer, 0) def sig_alrm(self, *args): self.hndl_called = True def sig_vtalrm(self, *args): self.hndl_called = True if self.hndl_count > 3: # it shouldn't be here, because it should have been disabled. raise signal.ItimerError("setitimer didn't disable ITIMER_VIRTUAL " "timer.") elif self.hndl_count == 3: # disable ITIMER_VIRTUAL, this function shouldn't be called anymore signal.setitimer(signal.ITIMER_VIRTUAL, 0) self.hndl_count += 1 def sig_prof(self, *args): self.hndl_called = True signal.setitimer(signal.ITIMER_PROF, 0) def test_itimer_exc(self): # XXX I'm assuming -1 is an invalid itimer, but maybe some platform # defines it ? self.assertRaises(signal.ItimerError, signal.setitimer, -1, 0) # Negative times are treated as zero on some platforms. if 0: self.assertRaises(signal.ItimerError, signal.setitimer, signal.ITIMER_REAL, -1) def test_itimer_real(self): self.itimer = signal.ITIMER_REAL signal.setitimer(self.itimer, 1.0) signal.pause() self.assertEqual(self.hndl_called, True) # Issue 3864, unknown if this affects earlier versions of freebsd also @unittest.skipIf(sys.platform in ('freebsd6', 'netbsd5'), 'itimer not reliable (does not mix well with threading) on some BSDs.') def test_itimer_virtual(self): self.itimer = signal.ITIMER_VIRTUAL signal.signal(signal.SIGVTALRM, self.sig_vtalrm) signal.setitimer(self.itimer, 0.3, 0.2) start_time = time.monotonic() while time.monotonic() - start_time < 60.0: # use up some virtual time by doing real work _ = pow(12345, 67890, 10000019) if signal.getitimer(self.itimer) == (0.0, 0.0): break # sig_vtalrm handler stopped this itimer else: # Issue 8424 self.skipTest("timeout: likely cause: machine too slow or load too " "high") # virtual itimer should be (0.0, 0.0) now self.assertEqual(signal.getitimer(self.itimer), (0.0, 0.0)) # and the handler should have been called self.assertEqual(self.hndl_called, True) # Issue 3864, unknown if this affects earlier versions of freebsd also @unittest.skipIf(sys.platform=='freebsd6', 'itimer not reliable (does not mix well with threading) on freebsd6') def test_itimer_prof(self): self.itimer = signal.ITIMER_PROF signal.signal(signal.SIGPROF, self.sig_prof) signal.setitimer(self.itimer, 0.2, 0.2) start_time = time.monotonic() while time.monotonic() - start_time < 60.0: # do some work _ = pow(12345, 67890, 10000019) if signal.getitimer(self.itimer) == (0.0, 0.0): break # sig_prof handler stopped this itimer else: # Issue 8424 self.skipTest("timeout: likely cause: machine too slow or load too " "high") # profiling itimer should be (0.0, 0.0) now self.assertEqual(signal.getitimer(self.itimer), (0.0, 0.0)) # and the handler should have been called self.assertEqual(self.hndl_called, True) def test_setitimer_tiny(self): # bpo-30807: C setitimer() takes a microsecond-resolution interval. # Check that float -> timeval conversion doesn't round # the interval down to zero, which would disable the timer. 
self.itimer = signal.ITIMER_REAL signal.setitimer(self.itimer, 1e-6) time.sleep(1) self.assertEqual(self.hndl_called, True) class PendingSignalsTests(unittest.TestCase): """ Test pthread_sigmask(), pthread_kill(), sigpending() and sigwait() functions. """ @unittest.skipUnless(hasattr(signal, 'sigpending'), 'need signal.sigpending()') def test_sigpending_empty(self): self.assertEqual(signal.sigpending(), set()) @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'), 'need signal.pthread_sigmask()') @unittest.skipUnless(hasattr(signal, 'sigpending'), 'need signal.sigpending()') def test_sigpending(self): code = """if 1: import os import signal def handler(signum, frame): 1/0 signum = signal.SIGUSR1 signal.signal(signum, handler) signal.pthread_sigmask(signal.SIG_BLOCK, [signum]) os.kill(os.getpid(), signum) pending = signal.sigpending() for sig in pending: assert isinstance(sig, signal.Signals), repr(pending) if pending != {signum}: raise Exception('%s != {%s}' % (pending, signum)) try: signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum]) except ZeroDivisionError: pass else: raise Exception("ZeroDivisionError not raised") """ assert_python_ok('-c', code) @unittest.skipUnless(hasattr(signal, 'pthread_kill'), 'need signal.pthread_kill()') def test_pthread_kill(self): code = """if 1: import signal import threading import sys signum = signal.SIGUSR1 def handler(signum, frame): 1/0 signal.signal(signum, handler) if sys.platform == 'freebsd6': # Issue #12392 and #12469: send a signal to the main thread # doesn't work before the creation of the first thread on # FreeBSD 6 def noop(): pass thread = threading.Thread(target=noop) thread.start() thread.join() tid = threading.get_ident() try: signal.pthread_kill(tid, signum) except ZeroDivisionError: pass else: raise Exception("ZeroDivisionError not raised") """ assert_python_ok('-c', code) @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'), 'need signal.pthread_sigmask()') def wait_helper(self, blocked, test): """ test: body of the "def test(signum):" function. blocked: number of the blocked signal """ code = '''if 1: import signal import sys from signal import Signals def handler(signum, frame): 1/0 %s blocked = %s signum = signal.SIGALRM # child: block and wait the signal try: signal.signal(signum, handler) signal.pthread_sigmask(signal.SIG_BLOCK, [blocked]) # Do the tests test(signum) # The handler must not be called on unblock try: signal.pthread_sigmask(signal.SIG_UNBLOCK, [blocked]) except ZeroDivisionError: print("the signal handler has been called", file=sys.stderr) sys.exit(1) except BaseException as err: print("error: {}".format(err), file=sys.stderr) sys.stderr.flush() sys.exit(1) ''' % (test.strip(), blocked) # sig*wait* must be called with the signal blocked: since the current # process might have several threads running, use a subprocess to have # a single thread. 
assert_python_ok('-c', code) @unittest.skipUnless(hasattr(signal, 'sigwait'), 'need signal.sigwait()') def test_sigwait(self): self.wait_helper(signal.SIGALRM, ''' def test(signum): signal.alarm(1) received = signal.sigwait([signum]) assert isinstance(received, signal.Signals), received if received != signum: raise Exception('received %s, not %s' % (received, signum)) ''') @unittest.skipUnless(hasattr(signal, 'sigwaitinfo'), 'need signal.sigwaitinfo()') def test_sigwaitinfo(self): self.wait_helper(signal.SIGALRM, ''' def test(signum): signal.alarm(1) info = signal.sigwaitinfo([signum]) if info.si_signo != signum: raise Exception("info.si_signo != %s" % signum) ''') @unittest.skipUnless(hasattr(signal, 'sigtimedwait'), 'need signal.sigtimedwait()') def test_sigtimedwait(self): self.wait_helper(signal.SIGALRM, ''' def test(signum): signal.alarm(1) info = signal.sigtimedwait([signum], 10.1000) if info.si_signo != signum: raise Exception('info.si_signo != %s' % signum) ''') @unittest.skipUnless(hasattr(signal, 'sigtimedwait'), 'need signal.sigtimedwait()') def test_sigtimedwait_poll(self): # check that polling with sigtimedwait works self.wait_helper(signal.SIGALRM, ''' def test(signum): import os os.kill(os.getpid(), signum) info = signal.sigtimedwait([signum], 0) if info.si_signo != signum: raise Exception('info.si_signo != %s' % signum) ''') @unittest.skipUnless(hasattr(signal, 'sigtimedwait'), 'need signal.sigtimedwait()') def test_sigtimedwait_timeout(self): self.wait_helper(signal.SIGALRM, ''' def test(signum): received = signal.sigtimedwait([signum], 1.0) if received is not None: raise Exception("received=%r" % (received,)) ''') @unittest.skipUnless(hasattr(signal, 'sigtimedwait'), 'need signal.sigtimedwait()') def test_sigtimedwait_negative_timeout(self): signum = signal.SIGALRM self.assertRaises(ValueError, signal.sigtimedwait, [signum], -1.0) @unittest.skipUnless(hasattr(signal, 'sigwait'), 'need signal.sigwait()') @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'), 'need signal.pthread_sigmask()') def test_sigwait_thread(self): # Check that calling sigwait() from a thread doesn't suspend the whole # process. A new interpreter is spawned to avoid problems when mixing # threads and fork(): only async-safe functions are allowed between # fork() and exec(). 
        assert_python_ok("-c", """if True:
            import os, threading, sys, time, signal

            # the default handler terminates the process
            signum = signal.SIGUSR1

            def kill_later():
                # wait until the main thread is waiting in sigwait()
                time.sleep(1)
                os.kill(os.getpid(), signum)

            # the signal must be blocked by all the threads
            signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
            killer = threading.Thread(target=kill_later)
            killer.start()
            received = signal.sigwait([signum])
            if received != signum:
                print("sigwait() received %s, not %s" % (received, signum),
                      file=sys.stderr)
                sys.exit(1)
            killer.join()
            # unblock the signal, which should have been cleared by sigwait()
            signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
        """)

    @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
                         'need signal.pthread_sigmask()')
    def test_pthread_sigmask_arguments(self):
        self.assertRaises(TypeError, signal.pthread_sigmask)
        self.assertRaises(TypeError, signal.pthread_sigmask, 1)
        self.assertRaises(TypeError, signal.pthread_sigmask, 1, 2, 3)
        self.assertRaises(OSError, signal.pthread_sigmask, 1700, [])

    @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
                         'need signal.pthread_sigmask()')
    def test_pthread_sigmask(self):
        code = """if 1:
            import signal
            import os; import threading

            def handler(signum, frame):
                1/0

            def kill(signum):
                os.kill(os.getpid(), signum)

            def check_mask(mask):
                for sig in mask:
                    assert isinstance(sig, signal.Signals), repr(sig)

            def read_sigmask():
                sigmask = signal.pthread_sigmask(signal.SIG_BLOCK, [])
                check_mask(sigmask)
                return sigmask

            signum = signal.SIGUSR1

            # Install our signal handler
            old_handler = signal.signal(signum, handler)

            # Unblock SIGUSR1 (and copy the old mask) to test our signal handler
            old_mask = signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
            check_mask(old_mask)
            try:
                kill(signum)
            except ZeroDivisionError:
                pass
            else:
                raise Exception("ZeroDivisionError not raised")

            # Block and then raise SIGUSR1. The signal is blocked: the signal
            # handler is not called, and the signal is now pending
            mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signum])
            check_mask(mask)
            kill(signum)

            # Check the new mask
            blocked = read_sigmask()
            check_mask(blocked)
            if signum not in blocked:
                raise Exception("%s not in %s" % (signum, blocked))
            if old_mask ^ blocked != {signum}:
                raise Exception("%s ^ %s != {%s}" % (old_mask, blocked, signum))

            # Unblock SIGUSR1
            try:
                # unblock the pending signal calls immediately the signal handler
                signal.pthread_sigmask(signal.SIG_UNBLOCK, [signum])
            except ZeroDivisionError:
                pass
            else:
                raise Exception("ZeroDivisionError not raised")
            try:
                kill(signum)
            except ZeroDivisionError:
                pass
            else:
                raise Exception("ZeroDivisionError not raised")

            # Check the new mask
            unblocked = read_sigmask()
            if signum in unblocked:
                raise Exception("%s in %s" % (signum, unblocked))
            if blocked ^ unblocked != {signum}:
                raise Exception("%s ^ %s != {%s}" % (blocked, unblocked, signum))
            if old_mask != unblocked:
                raise Exception("%s != %s" % (old_mask, unblocked))
        """
        assert_python_ok('-c', code)

    @unittest.skipIf(sys.platform == 'freebsd6',
        "issue #12392: send a signal to the main thread doesn't work "
        "before the creation of the first thread on FreeBSD 6")
    @unittest.skipUnless(hasattr(signal, 'pthread_kill'),
                         'need signal.pthread_kill()')
    def test_pthread_kill_main_thread(self):
        # Test that a signal can be sent to the main thread with pthread_kill()
        # before any other thread has been created (see issue #12392).
        code = """if True:
            import threading
            import signal
            import sys

            def handler(signum, frame):
                sys.exit(3)

            signal.signal(signal.SIGUSR1, handler)
            signal.pthread_kill(threading.get_ident(), signal.SIGUSR1)
            sys.exit(2)
        """

        with spawn_python('-c', code) as process:
            stdout, stderr = process.communicate()
            exitcode = process.wait()
            if exitcode != 3:
                raise Exception("Child error (exit code %s): %s" %
                                (exitcode, stdout))


class StressTest(unittest.TestCase):
    """
    Stress signal delivery, especially when a signal arrives in
    the middle of recomputing the signal state or executing
    previously tripped signal handlers.
    """

    def setsig(self, signum, handler):
        old_handler = signal.signal(signum, handler)
        self.addCleanup(signal.signal, signum, old_handler)

    def measure_itimer_resolution(self):
        N = 20
        times = []

        def handler(signum=None, frame=None):
            if len(times) < N:
                times.append(time.perf_counter())
                # 1 µs is the smallest possible timer interval,
                # we want to measure what the concrete duration
                # will be on this platform
                signal.setitimer(signal.ITIMER_REAL, 1e-6)

        self.addCleanup(signal.setitimer, signal.ITIMER_REAL, 0)
        self.setsig(signal.SIGALRM, handler)
        handler()
        while len(times) < N:
            time.sleep(1e-3)

        durations = [times[i+1] - times[i] for i in range(len(times) - 1)]
        med = statistics.median(durations)
        if support.verbose:
            print("detected median itimer() resolution: %.6f s." % (med,))
        return med

    def decide_itimer_count(self):
        # Some systems have poor setitimer() resolution (for example
        # measured around 20 ms. on FreeBSD 9), so decide on a reasonable
        # number of sequential timers based on that.
        reso = self.measure_itimer_resolution()
        if reso <= 1e-4:
            return 10000
        elif reso <= 1e-2:
            return 100
        else:
            self.skipTest("detected itimer resolution (%.3f s.) too high "
                          "(> 10 ms.) on this platform (or system too busy)"
                          % (reso,))

    @unittest.skipUnless(hasattr(signal, "setitimer"),
                         "test needs setitimer()")
    def test_stress_delivery_dependent(self):
        """
        This test uses dependent signal handlers.
        """
        N = self.decide_itimer_count()
        sigs = []

        def first_handler(signum, frame):
            # 1e-6 is the minimum non-zero value for `setitimer()`.
            # Choose a random delay so as to improve chances of
            # triggering a race condition.  Ideally the signal is received
            # when inside critical signal-handling routines such as
            # Py_MakePendingCalls().
            signal.setitimer(signal.ITIMER_REAL, 1e-6 + random.random() * 1e-5)

        def second_handler(signum=None, frame=None):
            sigs.append(signum)

        # Here on Linux, SIGPROF > SIGALRM > SIGUSR1.  By using both
        # ascending and descending sequences (SIGUSR1 then SIGALRM,
        # SIGPROF then SIGALRM), we maximize chances of hitting a bug.
        self.setsig(signal.SIGPROF, first_handler)
        self.setsig(signal.SIGUSR1, first_handler)
        self.setsig(signal.SIGALRM, second_handler)  # for ITIMER_REAL

        expected_sigs = 0
        deadline = time.time() + 15.0

        while expected_sigs < N:
            os.kill(os.getpid(), signal.SIGPROF)
            expected_sigs += 1
            # Wait for handlers to run to avoid signal coalescing
            while len(sigs) < expected_sigs and time.time() < deadline:
                time.sleep(1e-5)

            os.kill(os.getpid(), signal.SIGUSR1)
            expected_sigs += 1
            while len(sigs) < expected_sigs and time.time() < deadline:
                time.sleep(1e-5)

        # All ITIMER_REAL signals should have been delivered to the
        # Python handler
        self.assertEqual(len(sigs), N, "Some signals were lost")

    @unittest.skipUnless(hasattr(signal, "setitimer"),
                         "test needs setitimer()")
    def test_stress_delivery_simultaneous(self):
        """
        This test uses simultaneous signal handlers.
        """
        N = self.decide_itimer_count()
        sigs = []

        def handler(signum, frame):
            sigs.append(signum)

        self.setsig(signal.SIGUSR1, handler)
        self.setsig(signal.SIGALRM, handler)  # for ITIMER_REAL

        expected_sigs = 0
        deadline = time.time() + 15.0

        while expected_sigs < N:
            # Hopefully the SIGALRM will be received somewhere during
            # initial processing of SIGUSR1.
            signal.setitimer(signal.ITIMER_REAL, 1e-6 + random.random() * 1e-5)
            os.kill(os.getpid(), signal.SIGUSR1)

            expected_sigs += 2
            # Wait for handlers to run to avoid signal coalescing
            while len(sigs) < expected_sigs and time.time() < deadline:
                time.sleep(1e-5)

        # All ITIMER_REAL signals should have been delivered to the
        # Python handler
        self.assertEqual(len(sigs), N, "Some signals were lost")


def tearDownModule():
    support.reap_children()


if __name__ == "__main__":
    unittest.main()
eval_jcr.py
import os

import torch
import torch.multiprocessing as mp

from utils.evaluation import UntrimmedDatasetEvaluator
from utils.misc import get_folders_and_files

os.environ['CUDA_VISIBLE_DEVICES'] = '0'
mp.set_start_method('spawn', force=True)


def benchmark(evaluator: UntrimmedDatasetEvaluator, m_path: str, o_path: str):
    evaluator.set_model(m_path, o_path)
    evaluator.run_evaluation()


def kill_all_processes(p_list: list):
    for process in p_list:
        process.terminate()


if __name__ == '__main__':
    use_gpu = False  # torch.cuda.is_available()
    device = torch.device('cuda:0' if use_gpu else 'cpu')
    dataset_path = './dataset/OAD_Dataset_translated.skeldat'
    output_path = ('./validation'
                   '/')
    cde = UntrimmedDatasetEvaluator(dataset_path=dataset_path, device=device)
    models_for_testing = get_folders_and_files(output_path)[1]
    processes = []
    for model in models_for_testing:
        if not model.endswith('.tar'):
            continue
        model_path = output_path + model
        p = mp.Process(name=model, target=benchmark,
                       args=(cde, model_path, output_path))
        p.daemon = True  # set false if not running in interactive mode
        p.start()
        processes.append(p)
    # Daemon processes are killed as soon as the main process exits, so a
    # non-interactive run must wait for the workers to finish their evaluations.
    for p in processes:
        p.join()
species.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from . import BaseSpecies
from .utils import *

import threading
from itertools import product


class SimpleSpecies(BaseSpecies):
    pass


class DualSpecies(BaseSpecies):
    params = {'n_elders': 0.5, 'mate_prob': 0.75}

    @property
    def male_population(self):
        return self.populations[0]

    @property
    def female_population(self):
        return self.populations[1]

    @property
    def males(self):
        return self.populations[0].individuals

    @property
    def females(self):
        return self.populations[1].individuals

    @property
    def male_fitness(self):
        return self.populations[0].fitness

    @property
    def female_fitness(self):
        return self.populations[1].fitness

    def mate(self):
        self.populations[0].rank(tied=True)
        self.populations[1].rank(tied=True)
        children = []

        def _target(male, female):
            if random() < 0.5:
                child = male.cross(female)
            else:
                child = female.cross(male)
            children.append(child)

        ps = [threading.Thread(target=_target, args=(male, female))
              for male, female in product(self.males, self.females)
              if random() < self.mate_prob and self.match(male, female)]
        for p in ps:
            p.start()
        for p in ps:
            p.join()
        self.populations[0].add_individuals(children[::2])
        self.populations[1].add_individuals(children[1::2])

    def match(self, male, female):
        return True

    def transit(self, *args, **kwargs):
        elder = self.__class__([
            self.populations[0].__class__(
                self.populations[0].get_best_individuals(
                    self.n_elders * self.populations[0].default_size)),
            self.populations[1].__class__(
                self.populations[1].get_best_individuals(
                    self.n_elders * self.populations[1].default_size))
        ]).clone()
        self.select()
        self.mate()
        self.mutate()
        self.merge(elder)

    def select(self):
        self.populations[0].select()
        self.populations[1].select()

    def mutate(self):
        self.populations[0].mutate()
        self.populations[1].mutate()

    def merge(self, other):
        self.populations[0].merge(other.populations[0])
        self.populations[1].merge(other.populations[1])

    def post_process(self):
        super(DualSpecies, self).post_process()
        self.populations[0].fitness = self.populations[1].fitness = None
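mate() above fans one thread out per accepted pairing and joins them all before splitting the children between the two populations. The same fan-out/join shape in isolation (a generic sketch, not tied to the BaseSpecies API; appending to a shared list is safe here because CPython's GIL makes list.append atomic):

import threading
from random import random

results = []

def pair_off(a, b):
    # each worker contributes exactly one result, in nondeterministic order
    results.append((a, b) if random() < 0.5 else (b, a))

threads = [threading.Thread(target=pair_off, args=(m, f))
           for m in range(3) for f in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()          # all results are in place once every join returns
print(len(results))   # 9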
eventlet.py
"""A eventlet based handler.""" from __future__ import absolute_import import contextlib import logging import eventlet from eventlet.green import select as green_select from eventlet.green import socket as green_socket from eventlet.green import time as green_time from eventlet.green import threading as green_threading from eventlet import queue as green_queue from kazoo.handlers import utils import kazoo.python2atexit as python2atexit LOG = logging.getLogger(__name__) # sentinel objects _STOP = object() @contextlib.contextmanager def _yield_before_after(): # Yield to any other co-routines... # # See: http://eventlet.net/doc/modules/greenthread.html # for how this zero sleep is really a cooperative yield to other potential # co-routines... eventlet.sleep(0) try: yield finally: eventlet.sleep(0) class TimeoutError(Exception): pass class AsyncResult(utils.AsyncResult): """A one-time event that stores a value or an exception""" def __init__(self, handler): super(AsyncResult, self).__init__(handler, green_threading.Condition, TimeoutError) class SequentialEventletHandler(object): """Eventlet handler for sequentially executing callbacks. This handler executes callbacks in a sequential manner. A queue is created for each of the callback events, so that each type of event has its callback type run sequentially. These are split into two queues, one for watch events and one for async result completion callbacks. Each queue type has a greenthread worker that pulls the callback event off the queue and runs it in the order the client sees it. This split helps ensure that watch callbacks won't block session re-establishment should the connection be lost during a Zookeeper client call. Watch and completion callbacks should avoid blocking behavior as the next callback of that type won't be run until it completes. If you need to block, spawn a new greenthread and return immediately so callbacks can proceed. .. note:: Completion callbacks can block to wait on Zookeeper calls, but no other completion callbacks will execute until the callback returns. 
""" name = "sequential_eventlet_handler" queue_impl = green_queue.LightQueue queue_empty = green_queue.Empty def __init__(self): """Create a :class:`SequentialEventletHandler` instance""" self.callback_queue = self.queue_impl() self.completion_queue = self.queue_impl() self._workers = [] self._started = False @staticmethod def sleep_func(wait): green_time.sleep(wait) @property def running(self): return self._started timeout_exception = TimeoutError def _process_completion_queue(self): while True: cb = self.completion_queue.get() if cb is _STOP: break try: with _yield_before_after(): cb() except Exception: LOG.warning("Exception in worker completion queue greenlet", exc_info=True) def _process_callback_queue(self): while True: cb = self.callback_queue.get() if cb is _STOP: break try: with _yield_before_after(): cb() except Exception: LOG.warning("Exception in worker callback queue greenlet", exc_info=True) def start(self): if not self._started: # Spawn our worker threads, we have # - A callback worker for watch events to be called # - A completion worker for completion events to be called w = eventlet.spawn(self._process_completion_queue) self._workers.append((w, self.completion_queue)) w = eventlet.spawn(self._process_callback_queue) self._workers.append((w, self.callback_queue)) self._started = True python2atexit.register(self.stop) def stop(self): while self._workers: w, q = self._workers.pop() q.put(_STOP) w.wait() self._started = False python2atexit.unregister(self.stop) def socket(self, *args, **kwargs): return utils.create_tcp_socket(green_socket) def create_socket_pair(self): return utils.create_socket_pair(green_socket) def event_object(self): return green_threading.Event() def lock_object(self): return green_threading.Lock() def rlock_object(self): return green_threading.RLock() def create_connection(self, *args, **kwargs): return utils.create_tcp_connection(green_socket, *args, **kwargs) def select(self, *args, **kwargs): with _yield_before_after(): return green_select.select(*args, **kwargs) def async_result(self): return AsyncResult(self) def spawn(self, func, *args, **kwargs): t = green_threading.Thread(target=func, args=args, kwargs=kwargs) t.daemon = True t.start() return t def dispatch_callback(self, callback): self.callback_queue.put(lambda: callback.func(*callback.args))
httpclient_test.py
from __future__ import absolute_import, division, print_function import base64 import binascii from contextlib import closing import copy import functools import sys import threading import datetime from io import BytesIO from tornado.escape import utf8, native_str from tornado import gen from tornado.httpclient import HTTPRequest, HTTPResponse, _RequestProxy, HTTPError, HTTPClient from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop from tornado.iostream import IOStream from tornado.log import gen_log from tornado import netutil from tornado.stack_context import ExceptionStackContext, NullContext from tornado.testing import AsyncHTTPTestCase, bind_unused_port, gen_test, ExpectLog from tornado.test.util import unittest, skipOnTravis from tornado.web import Application, RequestHandler, url from tornado.httputil import format_timestamp, HTTPHeaders class HelloWorldHandler(RequestHandler): def get(self): name = self.get_argument("name", "world") self.set_header("Content-Type", "text/plain") self.finish("Hello %s!" % name) class PostHandler(RequestHandler): def post(self): self.finish("Post arg1: %s, arg2: %s" % ( self.get_argument("arg1"), self.get_argument("arg2"))) class PutHandler(RequestHandler): def put(self): self.write("Put body: ") self.write(self.request.body) class RedirectHandler(RequestHandler): def prepare(self): self.write('redirects can have bodies too') self.redirect(self.get_argument("url"), status=int(self.get_argument("status", "302"))) class ChunkHandler(RequestHandler): @gen.coroutine def get(self): self.write("asdf") self.flush() # Wait a bit to ensure the chunks are sent and received separately. yield gen.sleep(0.01) self.write("qwer") class AuthHandler(RequestHandler): def get(self): self.finish(self.request.headers["Authorization"]) class CountdownHandler(RequestHandler): def get(self, count): count = int(count) if count > 0: self.redirect(self.reverse_url("countdown", count - 1)) else: self.write("Zero") class EchoPostHandler(RequestHandler): def post(self): self.write(self.request.body) class UserAgentHandler(RequestHandler): def get(self): self.write(self.request.headers.get('User-Agent', 'User agent not set')) class ContentLength304Handler(RequestHandler): def get(self): self.set_status(304) self.set_header('Content-Length', 42) def _clear_headers_for_304(self): # Tornado strips content-length from 304 responses, but here we # want to simulate servers that include the headers anyway. pass class PatchHandler(RequestHandler): def patch(self): "Return the request payload - so we can check it is being kept" self.write(self.request.body) class AllMethodsHandler(RequestHandler): SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',) def method(self): self.write(self.request.method) get = post = put = delete = options = patch = other = method class SetHeaderHandler(RequestHandler): def get(self): # Use get_arguments for keys to get strings, but # request.arguments for values to get bytes. for k, v in zip(self.get_arguments('k'), self.request.arguments['v']): self.set_header(k, v) # These tests end up getting run redundantly: once here with the default # HTTPClient implementation, and then again in each implementation's own # test suite. 
class HTTPClientCommonTestCase(AsyncHTTPTestCase): def get_app(self): return Application([ url("/hello", HelloWorldHandler), url("/post", PostHandler), url("/put", PutHandler), url("/redirect", RedirectHandler), url("/chunk", ChunkHandler), url("/auth", AuthHandler), url("/countdown/([0-9]+)", CountdownHandler, name="countdown"), url("/echopost", EchoPostHandler), url("/user_agent", UserAgentHandler), url("/304_with_content_length", ContentLength304Handler), url("/all_methods", AllMethodsHandler), url('/patch', PatchHandler), url('/set_header', SetHeaderHandler), ], gzip=True) def test_patch_receives_payload(self): body = b"some patch data" response = self.fetch("/patch", method='PATCH', body=body) self.assertEqual(response.code, 200) self.assertEqual(response.body, body) @skipOnTravis def test_hello_world(self): response = self.fetch("/hello") self.assertEqual(response.code, 200) self.assertEqual(response.headers["Content-Type"], "text/plain") self.assertEqual(response.body, b"Hello world!") self.assertEqual(int(response.request_time), 0) response = self.fetch("/hello?name=Ben") self.assertEqual(response.body, b"Hello Ben!") def test_streaming_callback(self): # streaming_callback is also tested in test_chunked chunks = [] response = self.fetch("/hello", streaming_callback=chunks.append) # with streaming_callback, data goes to the callback and not response.body self.assertEqual(chunks, [b"Hello world!"]) self.assertFalse(response.body) def test_post(self): response = self.fetch("/post", method="POST", body="arg1=foo&arg2=bar") self.assertEqual(response.code, 200) self.assertEqual(response.body, b"Post arg1: foo, arg2: bar") def test_chunked(self): response = self.fetch("/chunk") self.assertEqual(response.body, b"asdfqwer") chunks = [] response = self.fetch("/chunk", streaming_callback=chunks.append) self.assertEqual(chunks, [b"asdf", b"qwer"]) self.assertFalse(response.body) def test_chunked_close(self): # test case in which chunks spread read-callback processing # over several ioloop iterations, but the connection is already closed. sock, port = bind_unused_port() with closing(sock): def write_response(stream, request_data): if b"HTTP/1." 
not in request_data: self.skipTest("requires HTTP/1.x") stream.write(b"""\ HTTP/1.1 200 OK Transfer-Encoding: chunked 1 1 1 2 0 """.replace(b"\n", b"\r\n"), callback=stream.close) def accept_callback(conn, address): # fake an HTTP server using chunked encoding where the final chunks # and connection close all happen at once stream = IOStream(conn) stream.read_until(b"\r\n\r\n", functools.partial(write_response, stream)) netutil.add_accept_handler(sock, accept_callback) self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop) resp = self.wait() resp.rethrow() self.assertEqual(resp.body, b"12") self.io_loop.remove_handler(sock.fileno()) def test_streaming_stack_context(self): chunks = [] exc_info = [] def error_handler(typ, value, tb): exc_info.append((typ, value, tb)) return True def streaming_cb(chunk): chunks.append(chunk) if chunk == b'qwer': 1 / 0 with ExceptionStackContext(error_handler): self.fetch('/chunk', streaming_callback=streaming_cb) self.assertEqual(chunks, [b'asdf', b'qwer']) self.assertEqual(1, len(exc_info)) self.assertIs(exc_info[0][0], ZeroDivisionError) def test_basic_auth(self): self.assertEqual(self.fetch("/auth", auth_username="Aladdin", auth_password="open sesame").body, b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==") def test_basic_auth_explicit_mode(self): self.assertEqual(self.fetch("/auth", auth_username="Aladdin", auth_password="open sesame", auth_mode="basic").body, b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==") def test_unsupported_auth_mode(self): # curl and simple clients handle errors a bit differently; the # important thing is that they don't fall back to basic auth # on an unknown mode. with ExpectLog(gen_log, "uncaught exception", required=False): with self.assertRaises((ValueError, HTTPError)): response = self.fetch("/auth", auth_username="Aladdin", auth_password="open sesame", auth_mode="asdf") response.rethrow() def test_follow_redirect(self): response = self.fetch("/countdown/2", follow_redirects=False) self.assertEqual(302, response.code) self.assertTrue(response.headers["Location"].endswith("/countdown/1")) response = self.fetch("/countdown/2") self.assertEqual(200, response.code) self.assertTrue(response.effective_url.endswith("/countdown/0")) self.assertEqual(b"Zero", response.body) def test_credentials_in_url(self): url = self.get_url("/auth").replace("http://", "http://me:secret@") self.http_client.fetch(url, self.stop) response = self.wait() self.assertEqual(b"Basic " + base64.b64encode(b"me:secret"), response.body) def test_body_encoding(self): unicode_body = u"\xe9" byte_body = binascii.a2b_hex(b"e9") # unicode string in body gets converted to utf8 response = self.fetch("/echopost", method="POST", body=unicode_body, headers={"Content-Type": "application/blah"}) self.assertEqual(response.headers["Content-Length"], "2") self.assertEqual(response.body, utf8(unicode_body)) # byte strings pass through directly response = self.fetch("/echopost", method="POST", body=byte_body, headers={"Content-Type": "application/blah"}) self.assertEqual(response.headers["Content-Length"], "1") self.assertEqual(response.body, byte_body) # Mixing unicode in headers and byte string bodies shouldn't # break anything response = self.fetch("/echopost", method="POST", body=byte_body, headers={"Content-Type": "application/blah"}, user_agent=u"foo") self.assertEqual(response.headers["Content-Length"], "1") self.assertEqual(response.body, byte_body) def test_types(self): response = self.fetch("/hello") self.assertEqual(type(response.body), bytes) 
self.assertEqual(type(response.headers["Content-Type"]), str) self.assertEqual(type(response.code), int) self.assertEqual(type(response.effective_url), str) def test_header_callback(self): first_line = [] headers = {} chunks = [] def header_callback(header_line): if header_line.startswith('HTTP/1.1 101'): # Upgrading to HTTP/2 pass elif header_line.startswith('HTTP/'): first_line.append(header_line) elif header_line != '\r\n': k, v = header_line.split(':', 1) headers[k.lower()] = v.strip() def streaming_callback(chunk): # All header callbacks are run before any streaming callbacks, # so the header data is available to process the data as it # comes in. self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8') chunks.append(chunk) self.fetch('/chunk', header_callback=header_callback, streaming_callback=streaming_callback) self.assertEqual(len(first_line), 1, first_line) self.assertRegexpMatches(first_line[0], 'HTTP/[0-9]\\.[0-9] 200.*\r\n') self.assertEqual(chunks, [b'asdf', b'qwer']) def test_header_callback_stack_context(self): exc_info = [] def error_handler(typ, value, tb): exc_info.append((typ, value, tb)) return True def header_callback(header_line): if header_line.lower().startswith('content-type:'): 1 / 0 with ExceptionStackContext(error_handler): self.fetch('/chunk', header_callback=header_callback) self.assertEqual(len(exc_info), 1) self.assertIs(exc_info[0][0], ZeroDivisionError) def test_configure_defaults(self): defaults = dict(user_agent='TestDefaultUserAgent', allow_ipv6=False) # Construct a new instance of the configured client class client = self.http_client.__class__(force_instance=True, defaults=defaults) try: client.fetch(self.get_url('/user_agent'), callback=self.stop) response = self.wait() self.assertEqual(response.body, b'TestDefaultUserAgent') finally: client.close() def test_header_types(self): # Header values may be passed as character or utf8 byte strings, # in a plain dictionary or an HTTPHeaders object. # Keys must always be the native str type. # All combinations should have the same results on the wire. for value in [u"MyUserAgent", b"MyUserAgent"]: for container in [dict, HTTPHeaders]: headers = container() headers['User-Agent'] = value resp = self.fetch('/user_agent', headers=headers) self.assertEqual( resp.body, b"MyUserAgent", "response=%r, value=%r, container=%r" % (resp.body, value, container)) def test_multi_line_headers(self): # Multi-line http headers are rare but rfc-allowed # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 sock, port = bind_unused_port() with closing(sock): def write_response(stream, request_data): if b"HTTP/1." not in request_data: self.skipTest("requires HTTP/1.x") stream.write(b"""\ HTTP/1.1 200 OK X-XSS-Protection: 1; \tmode=block """.replace(b"\n", b"\r\n"), callback=stream.close) def accept_callback(conn, address): stream = IOStream(conn) stream.read_until(b"\r\n\r\n", functools.partial(write_response, stream)) netutil.add_accept_handler(sock, accept_callback) self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop) resp = self.wait() resp.rethrow() self.assertEqual(resp.headers['X-XSS-Protection'], "1; mode=block") self.io_loop.remove_handler(sock.fileno()) def test_304_with_content_length(self): # According to the spec 304 responses SHOULD NOT include # Content-Length or other entity headers, but some servers do it # anyway. 
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5 response = self.fetch('/304_with_content_length') self.assertEqual(response.code, 304) self.assertEqual(response.headers['Content-Length'], '42') def test_final_callback_stack_context(self): # The final callback should be run outside of the httpclient's # stack_context. We want to ensure that there is not stack_context # between the user's callback and the IOLoop, so monkey-patch # IOLoop.handle_callback_exception and disable the test harness's # context with a NullContext. # Note that this does not apply to secondary callbacks (header # and streaming_callback), as errors there must be seen as errors # by the http client so it can clean up the connection. exc_info = [] def handle_callback_exception(callback): exc_info.append(sys.exc_info()) self.stop() self.io_loop.handle_callback_exception = handle_callback_exception with NullContext(): self.http_client.fetch(self.get_url('/hello'), lambda response: 1 / 0) self.wait() self.assertEqual(exc_info[0][0], ZeroDivisionError) @gen_test def test_future_interface(self): response = yield self.http_client.fetch(self.get_url('/hello')) self.assertEqual(response.body, b'Hello world!') @gen_test def test_future_http_error(self): with self.assertRaises(HTTPError) as context: yield self.http_client.fetch(self.get_url('/notfound')) self.assertEqual(context.exception.code, 404) self.assertEqual(context.exception.response.code, 404) @gen_test def test_future_http_error_no_raise(self): response = yield self.http_client.fetch(self.get_url('/notfound'), raise_error=False) self.assertEqual(response.code, 404) @gen_test def test_reuse_request_from_response(self): # The response.request attribute should be an HTTPRequest, not # a _RequestProxy. # This test uses self.http_client.fetch because self.fetch calls # self.get_url on the input unconditionally. url = self.get_url('/hello') response = yield self.http_client.fetch(url) self.assertEqual(response.request.url, url) self.assertTrue(isinstance(response.request, HTTPRequest)) response2 = yield self.http_client.fetch(response.request) self.assertEqual(response2.body, b'Hello world!') def test_all_methods(self): for method in ['GET', 'DELETE', 'OPTIONS']: response = self.fetch('/all_methods', method=method) self.assertEqual(response.body, utf8(method)) for method in ['POST', 'PUT', 'PATCH']: response = self.fetch('/all_methods', method=method, body=b'') self.assertEqual(response.body, utf8(method)) response = self.fetch('/all_methods', method='HEAD') self.assertEqual(response.body, b'') response = self.fetch('/all_methods', method='OTHER', allow_nonstandard_methods=True) self.assertEqual(response.body, b'OTHER') def test_body_sanity_checks(self): # These methods require a body. for method in ('POST', 'PUT', 'PATCH'): with self.assertRaises(ValueError) as context: resp = self.fetch('/all_methods', method=method) resp.rethrow() self.assertIn('must not be None', str(context.exception)) resp = self.fetch('/all_methods', method=method, allow_nonstandard_methods=True) self.assertEqual(resp.code, 200) # These methods don't allow a body. for method in ('GET', 'DELETE', 'OPTIONS'): with self.assertRaises(ValueError) as context: resp = self.fetch('/all_methods', method=method, body=b'asdf') resp.rethrow() self.assertIn('must be None', str(context.exception)) # In most cases this can be overridden, but curl_httpclient # does not allow body with a GET at all. 
if method != 'GET': resp = self.fetch('/all_methods', method=method, body=b'asdf', allow_nonstandard_methods=True) resp.rethrow() self.assertEqual(resp.code, 200) # This test causes odd failures with the combination of # curl_httpclient (at least with the version of libcurl available # on ubuntu 12.04), TwistedIOLoop, and epoll. For POST (but not PUT), # curl decides the response came back too soon and closes the connection # to start again. It does this *before* telling the socket callback to # unregister the FD. Some IOLoop implementations have special kernel # integration to discover this immediately. Tornado's IOLoops # ignore errors on remove_handler to accommodate this behavior, but # Twisted's reactor does not. The removeReader call fails and so # do all future removeAll calls (which our tests do at cleanup). # # def test_post_307(self): # response = self.fetch("/redirect?status=307&url=/post", # method="POST", body=b"arg1=foo&arg2=bar") # self.assertEqual(response.body, b"Post arg1: foo, arg2: bar") def test_put_307(self): response = self.fetch("/redirect?status=307&url=/put", method="PUT", body=b"hello") response.rethrow() self.assertEqual(response.body, b"Put body: hello") def test_non_ascii_header(self): # Non-ascii headers are sent as latin1. response = self.fetch("/set_header?k=foo&v=%E9") response.rethrow() self.assertEqual(response.headers["Foo"], native_str(u"\u00e9")) class RequestProxyTest(unittest.TestCase): def test_request_set(self): proxy = _RequestProxy(HTTPRequest('http://example.com/', user_agent='foo'), dict()) self.assertEqual(proxy.user_agent, 'foo') def test_default_set(self): proxy = _RequestProxy(HTTPRequest('http://example.com/'), dict(network_interface='foo')) self.assertEqual(proxy.network_interface, 'foo') def test_both_set(self): proxy = _RequestProxy(HTTPRequest('http://example.com/', proxy_host='foo'), dict(proxy_host='bar')) self.assertEqual(proxy.proxy_host, 'foo') def test_neither_set(self): proxy = _RequestProxy(HTTPRequest('http://example.com/'), dict()) self.assertIs(proxy.auth_username, None) def test_bad_attribute(self): proxy = _RequestProxy(HTTPRequest('http://example.com/'), dict()) with self.assertRaises(AttributeError): proxy.foo def test_defaults_none(self): proxy = _RequestProxy(HTTPRequest('http://example.com/'), None) self.assertIs(proxy.auth_username, None) class HTTPResponseTestCase(unittest.TestCase): def test_str(self): response = HTTPResponse(HTTPRequest('http://example.com'), 200, headers={}, buffer=BytesIO()) s = str(response) self.assertTrue(s.startswith('HTTPResponse(')) self.assertIn('code=200', s) class SyncHTTPClientTest(unittest.TestCase): def setUp(self): if IOLoop.configured_class().__name__ == 'TwistedIOLoop': # TwistedIOLoop only supports the global reactor, so we can't have # separate IOLoops for client and server threads. 
raise unittest.SkipTest( 'Sync HTTPClient not compatible with TwistedIOLoop') self.server_ioloop = IOLoop() @gen.coroutine def init_server(): sock, self.port = bind_unused_port() app = Application([('/', HelloWorldHandler)]) self.server = HTTPServer(app) self.server.add_socket(sock) self.server_ioloop.run_sync(init_server) self.server_thread = threading.Thread(target=self.server_ioloop.start) self.server_thread.start() self.http_client = HTTPClient() def tearDown(self): def stop_server(): self.server.stop() # Delay the shutdown of the IOLoop by several iterations because # the server may still have some cleanup work left when # the client finishes with the response (this is noticeable # with http/2, which leaves a Future with an unexamined # StreamClosedError on the loop). @gen.coroutine def slow_stop(): # The number of iterations is difficult to predict. Typically, # one is sufficient, although sometimes it needs more. for i in range(5): yield self.server_ioloop.stop() self.server_ioloop.add_callback(slow_stop) self.server_ioloop.add_callback(stop_server) self.server_thread.join() self.http_client.close() self.server_ioloop.close(all_fds=True) def get_url(self, path): return 'http://127.0.0.1:%d%s' % (self.port, path) def test_sync_client(self): response = self.http_client.fetch(self.get_url('/')) self.assertEqual(b'Hello world!', response.body) def test_sync_client_error(self): # Synchronous HTTPClient raises errors directly; no need for # response.rethrow() with self.assertRaises(HTTPError) as assertion: self.http_client.fetch(self.get_url('/notfound')) self.assertEqual(assertion.exception.code, 404) class HTTPRequestTestCase(unittest.TestCase): def test_headers(self): request = HTTPRequest('http://example.com', headers={'foo': 'bar'}) self.assertEqual(request.headers, {'foo': 'bar'}) def test_headers_setter(self): request = HTTPRequest('http://example.com') request.headers = {'bar': 'baz'} self.assertEqual(request.headers, {'bar': 'baz'}) def test_null_headers_setter(self): request = HTTPRequest('http://example.com') request.headers = None self.assertEqual(request.headers, {}) def test_body(self): request = HTTPRequest('http://example.com', body='foo') self.assertEqual(request.body, utf8('foo')) def test_body_setter(self): request = HTTPRequest('http://example.com') request.body = 'foo' self.assertEqual(request.body, utf8('foo')) def test_if_modified_since(self): http_date = datetime.datetime.utcnow() request = HTTPRequest('http://example.com', if_modified_since=http_date) self.assertEqual(request.headers, {'If-Modified-Since': format_timestamp(http_date)}) class HTTPErrorTestCase(unittest.TestCase): def test_copy(self): e = HTTPError(403) e2 = copy.copy(e) self.assertIsNot(e, e2) self.assertEqual(e.code, e2.code) def test_plain_error(self): e = HTTPError(403) self.assertEqual(str(e), "HTTP 403: Forbidden") self.assertEqual(repr(e), "HTTP 403: Forbidden") def test_error_with_response(self): resp = HTTPResponse(HTTPRequest('http://example.com/'), 403) with self.assertRaises(HTTPError) as cm: resp.rethrow() e = cm.exception self.assertEqual(str(e), "HTTP 403: Forbidden") self.assertEqual(repr(e), "HTTP 403: Forbidden")
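For reference, the streaming behaviour asserted in test_streaming_callback looks like this from the caller's side. A sketch against the pre-6.0 Tornado API these tests target, with a hypothetical server URL:

from tornado.httpclient import HTTPClient

chunks = []
client = HTTPClient()
# With streaming_callback set, body data goes to the callback and
# response.body stays empty -- exactly what test_streaming_callback asserts.
response = client.fetch("http://127.0.0.1:8888/chunk",   # hypothetical endpoint
                        streaming_callback=chunks.append)
print(chunks, response.body)
client.close()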
multiprocessing7_lock.py
# View more python tutorials on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial

import multiprocessing as mp
import time


def job(v, num, l):
    l.acquire()  # hold the lock so only one process updates the shared value at a time
    for _ in range(10):
        time.sleep(0.1)
        v.value += num  # += on a shared Value is not atomic, hence the lock
        print(v.value)
    l.release()


def multicore():
    l = mp.Lock()
    v = mp.Value('i', 0)  # shared integer, visible to both child processes
    p1 = mp.Process(target=job, args=(v, 1, l))
    p2 = mp.Process(target=job, args=(v, 3, l))
    p1.start()
    p2.start()
    p1.join()
    p2.join()


if __name__ == '__main__':
    multicore()
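Since multiprocessing locks are context managers, the acquire/release pair above is usually written with a `with` block, which also releases the lock if the body raises. A sketch of the same job in that style (not part of the tutorial above):

import time

def job_with(v, num, l):
    with l:                    # acquired on entry, released on exit, even on error
        for _ in range(10):
            time.sleep(0.1)
            v.value += num     # still needs the lock; += on a Value is not atomic
            print(v.value)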
threads.py
import time
from threading import Thread, Event


def countdown(n, event):
    while n:
        event.set()
        print(n)
        time.sleep(1.5)
        n -= 1


event = Event()
# Thread object is created here but never started; call t.start() to run it.
t = Thread(target=countdown, args=(100, event))


class CountDown:
    def __init__(self):
        self._running = True

    def terminate(self):
        # cooperative stop: run() polls _running between iterations
        self._running = False

    def run(self, n):
        m = 0
        while self._running and n:
            print(f'{n} seconds remaining')
            time.sleep(2)
            m += 2
            print(f'{m} seconds gone')
            n -= 1


cd = CountDown()
# Also created but not started; see the usage sketch below.
f = Thread(target=cd.run, args=(20,))
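Neither Thread above is ever started, so nothing runs when this module executes. A sketch of how the cooperative-cancellation class is meant to be driven, assuming the CountDown definition above:

import time
from threading import Thread

cd2 = CountDown()
worker = Thread(target=cd2.run, args=(10,))
worker.start()
time.sleep(5)
cd2.terminate()   # flips _running; run() notices at its next loop check
worker.join()     # returns once run() exits cooperatively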
PreDebugWatchDev.py
import os
import platform
import subprocess
from threading import Thread
import time
import sys
import random
import socket

print("SqBuild: Python ver: " + platform.python_version() + " (" + platform.architecture()[0] + "), CWD:'" + os.getcwd() + "'")
if (os.getcwd().endswith("SqCore")):   # VsCode's context menu 'Run Python file in Terminal' runs it from the workspace folder. VsCode F5 runs it from the project folder. We change it to the project folder
    os.chdir(os.getcwd() + "/src/WebServer/SqCoreWeb")

# 1. Basic checks: Ensure Node.js is installed. If node_modules folder is empty, it should restore Npm packages.
nodeTouchFile = os.getcwd() + "/node_modules/.install-stamp"
if os.path.isfile(nodeTouchFile):
    print("SqBuild: /node_modules/ exist")
else:
    nodeRetCode = os.system("node --version")   # don't want to run 'node --version' all the times. If stamp file exists, assume node.exe is installed
    if (nodeRetCode != 0):
        sys.exit("Node.js is required to build and run this project. To continue, please install Node.js from https://nodejs.org/")
    sys.exit("SqBuild: PreDebugBuildDev.py checks /node_modules in parallel. And we don't want that both processes start to download that huge folder. Exit now. It only happens once per year.")

angularRetCode = os.system("ng --version")
if (angularRetCode != 0):
    sys.exit("SqBuild: NodeJs's AngularCLI is required to build and run this project. To continue, please install 'npm install -g @angular/cli@9.0.0-rc.10' on (2020-01-29) ")
# os.system("npm install")
# Path(nodeTouchFile).touch()

# 2. What can Debug user watch: wwwrootGeneral (NonWebpack), ExampleCsServerPushInRealtime (Webpack), HealthMonitor (Angular), MarketDashboard (Angular)
def threaded_function(commandStr):
    print("SqBuild: Executing in separate thread: " + commandStr)
    os.system(commandStr)   # works like normal, loads ./tsconfig.json, which contains "include": ["wwwroot"].
    # processObj = subprocess.run("tsc --watch", shell=True, stdout=subprocess.PIPE)   # This will run the command and return any output into process.output

def startShellCallingThread(pCommandStr):
    thread = Thread(target=threaded_function, args=(pCommandStr,))
    thread.setDaemon(True)   # daemon = true didn't help. Main thread exited, but watchers were left alive.
    thread.start()
    # thread1.join()

# 2.1 Non-Webpack webapps in ./wwwroot/webapps should be transpiled from TS to JS
# os.system("tsc --watch")   # works like normal, loads ./tsconfig.json, which contains "include": ["wwwroot"].
# subprocess.run(["tsc", "--watch"], stdout=subprocess.PIPE)   # This will run the command and return any output
# subprocess.run("tsc --watch", shell=True, stdout=subprocess.PIPE)
# subprocess.run("tsc --watch", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)   # This will run the command and return any output
# thread1 = Thread(target=threaded_function, args=("tsc --watch",))
# thread1.setDaemon(True)   # daemon = true didn't help. Main thread exited, but watchers were left alive.
# thread1.start()
# thread1.join()
startShellCallingThread("tsc --watch")

# 2.2 Webpack webapps in ./webapps should be packed (TS, CSS, HTML)
# Webpack: 'Multiple output files' are not possible and out of scope of webpack. You can use a build system.
# thread2 = Thread(target=threaded_function, args=("npx webpack --config webapps/ExampleCsServerPushInRealtime/webpack.config.js --mode=development --watch",))
# thread2.setDaemon(True)
# thread2.start()
# thread2.join()
startShellCallingThread("npx webpack --config webapps/ExampleCsServerPushInRealtime/webpack.config.js --mode=development --watch")

# 2.3 Angular webapps in the project folder should be served on different ports. If an Angular app is not developed any more, comment it out to save resources
# ng serve doesn't create anything into --output-path=wwwroot/webapps/ (it keeps its files temp, maybe in RAM)
# to create files into wwwroot/weapps, at publish run 'ng build HealthMonitor --prod --output-path=wwwroot/webapps/HealthMonitor --base-href ./'
startShellCallingThread("ng serve --proxy-config angular.watch.proxy.conf.js HealthMonitor --port 4201")
startShellCallingThread("ng serve --proxy-config angular.watch.proxy.conf.js MarketDashboard --port 4202")

# 3. Wait for Python message to terminate all threads.
print("SqBuild: User can break (Control-C, or closing CMD) here manually. Or wait for socket (TCP port) communication from another Python thread to end this thread.")
# Named pipes are nothing but mechanisms that allow IPC communication through the use of file descriptors associated with special files
# Let's use the basic socket, because it is platform-independent (and it is not file based), and we can use it easily in C# interop to Python (even under Linux).
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)   # versus AF_LOCAL; reliable, bidirectional
serversocket.bind(('localhost', 8389))   # bind the server socket to 'localhost'. On most platforms, this will take a shortcut around a couple of layers of network code and be quite a bit faster.
serversocket.listen(5)   # become a server socket, maximum 5 connections, queue up as many as 5 connect requests (the normal max) before refusing outside connections.
while True:
    connection, address = serversocket.accept()
    buf = connection.recv(64)
    if len(buf) > 0:
        print("SqBuild: Socket received message: " + str(buf))
        break

# 4. Terminate all threads. It not only terminates threads, but 'taskkill current process' will close the CMD window as well, which is perfect.
print("SqBuild: Main thread exits now as it kills the CMD/terminal window")
# quit(), sys.exit()   # they don't kill the started child-threads, even though they are daemons
uniqueCmdTitle = "PreDebugWatch.py." + str(random.randint(0, 99999))
os.system("title " + uniqueCmdTitle)   # set the title of the CMD
os.system('taskkill /F /FI "WindowTitle eq ' + uniqueCmdTitle + '" /T')   # kill the task which has the title that was just set.
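The stop side of the handshake is equally small: any process that wants to end the watchers connects to the port above and sends a non-empty payload. A minimal peer sketch:

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('localhost', 8389))   # the port PreDebugWatchDev.py listens on
s.sendall(b'stop')               # any non-empty message breaks the accept loop
s.close()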
service_handler.py
""" Deals with the webserver and the service modules """ import importlib import BaseHTTPServer import SimpleHTTPServer from SocketServer import ThreadingMixIn import threading from time import sleep from utils import * import json import MySQLdb import warnings import base_service import atexit #Web server Protocol = "HTTP/1.0" ServerPort = 8850 #The string to search for when finding relevant databases db_prepend = 'AdelaideX' class ServiceManager(): """ Manages service modules """ servicethreads = [] servicemodules = [] sql_db = None def __init__(self): log("Starting Service Manager") self.setup_ingest_database() def load_services(self): """ Loads each module and removes any previously uncompleted sessions """ root_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1) cur = root_db.cursor() query = "UPDATE ingestor SET started=0, started_date=NULL WHERE completed=0 AND started=1;" cur.execute(query) servicespath = os.path.join(basepath, 'services') for servicename in os.listdir(servicespath): if servicename not in config.IGNORE_SERVICES: servicepath = os.path.join(servicespath, servicename, 'service.py') if os.path.exists(servicepath): log("Starting module: "+servicename) service_module = importlib.import_module('services.' + servicename + '.service') service_module.manager = self self.servicemodules.append(service_module) #Start Thread servicethread = threading.Thread(target=service_module.service) servicethread.daemon = True servicethread.start() self.servicethreads.append(servicethread) def setup_ingest_database(self): """ Ensures that the required DB and tables exist """ warnings.filterwarnings('ignore', category=MySQLdb.Warning) #Create and connect to the API database log("Testing Database existance") try: self.sql_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1) except MySQLdb.OperationalError: self.sql_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='mysql', local_infile=1) cur = self.sql_db.cursor() cur.execute("CREATE DATABASE api") self.sql_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1) if self.sql_db: log("Creating table API") #Create the ingestor table if necessary cur = self.sql_db.cursor() query = "CREATE TABLE IF NOT EXISTS ingestor ( " query += "id int NOT NULL UNIQUE AUTO_INCREMENT, service_name varchar(255), type varchar(255), meta varchar(255), started int DEFAULT 0, completed int DEFAULT 0, created datetime NULL, started_date datetime NULL, completed_date datetime NULL, PRIMARY KEY (id)" query += ");" cur.execute(query) warnings.filterwarnings('always', category=MySQLdb.Warning) def add_to_ingestion(self, service_name, ingest_type, meta): """ Inserts a line into the ingestion table :param meta: any information the service needs :param ingest_type: the type of the ingestion :param service_name: the name of the service """ insert = True cur = self.sql_db.cursor() #Check if entry already exists query = 'SELECT count(*) FROM ingestor WHERE service_name="' + service_name + '" AND type="' + ingest_type + '" AND meta="' + meta + '";' cur.execute(query) for row in cur.fetchall(): if int(row[0]) > 0: insert = False if insert: #Insert the new entry created = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') query = 'INSERT INTO ingestor (service_name,type,meta,created) VALUES (' query += 
'"'+service_name+'","'+ingest_type+'","'+meta+'","'+created+'");' cur.execute(query) self.sql_db.commit() def get_status(service_name): """ Gets the current status of a service :param service_name: the name of the service :return: a dictionary of the services status """ api_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1) status = {'name': service_name} cur = api_db.cursor() query = 'SELECT type, meta, started, completed, started_date, completed_date FROM ingestor WHERE service_name="' + service_name + '" AND started=1 ORDER BY created;' cur.execute(query) status['status'] = 'stopped' hasany = False status['task'] = '' status['file'] = '' status['startdate'] = '' status['lastcompletedate'] = '' status['tasksleft'] = 0 for row in cur.fetchall(): if not hasany: status['status'] = 'sleeping' hasany = True if row[2] == 1 and row[3] == 0: status['status'] = 'running' status['task'] = row[0] if row[4] is not None: status['startdate'] = row[4].strftime('%Y-%m-%d %H:%M:%S') else: status['startdate'] = '' if status['task'] == 'file': status['file'] = os.path.basename(row[1]) status['tasksleft'] += 1 elif row[2] == 1 and row[3] == 1: if row[5] is not None: status['lastcompletedate'] = row[5].strftime('%Y%m%d %H:%M:%S') else: status['lastcompletedate'] = '' elif row[2] == 0: status['tasksleft'] += 1 return status def queue_data(servicehandler): """ Asks each service for which files it needs, and adds them to the ingestor :param servicehandler: The service handler :return: Returns True when completed """ for path in config.DATA_PATHS: path = os.path.realpath(path) for service_module in ServiceManager.servicemodules: required_files = service_module.get_files(path) for required_file in required_files: # print('ingesting.. 
' + required_file) # Add file to the ingestion table servicehandler.manager.add_to_ingestion(service_module.name(), 'file', os.path.realpath(required_file)) return True def remove_all_data(): """ Completely wipes the ingestion, should never be used apart from testing :return: Returns True when completed """ root_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1) cur = root_db.cursor() query = "SHOW DATABASES;" cur.execute(query) for row in cur.fetchall(): if row[0].find(db_prepend) > -1 or row[0] == 'Person_Course': #Drop the relevant DBs query = "DROP DATABASE "+row[0] cur.execute(query) root_db.commit() log("*** Removing database "+row[0]) #Empty the ingestor pcourse_db = MySQLdb.connect(host=config.SQL_HOST, user=config.SQL_USERNAME, passwd=config.SQL_PASSWORD, db='api', local_infile=1) pcur = pcourse_db.cursor() query = "TRUNCATE ingestor" pcur.execute(query) pcourse_db.commit() log("*** Resetting ingestor cache") #Delete the mongoDB cmd = config.MONGO_PATH + "mongo " + config.MONGO_HOST + "/logs --eval \"db.dropDatabase()\"" os.system(cmd) class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): """ Responds to HTTP Requests """ response = 0 servicehandler = None # def runmodule(self, modulename, meta): # """ # Executes a module to run (usually a module which does not loop) # This may not be needed in the new ingestor # """ # servicespath = os.path.join(basepath, 'services') # servicepath = os.path.join(servicespath, modulename, 'service.py') # if os.path.exists(servicepath): # pass # #log("Starting once-off module "+servicename) # #servicemodule = baseservice.load_module(servicename) # #print meta # #servicethread = threading.Thread(target=servicemodule.runservice, args=meta) # #servicethread.start() def do_GET(self): """ Response to GET requests """ response = {} self.send_response(200) self.send_header('Content-Type', 'application/json') self.send_header("Access-Control-Allow-Origin", "*") self.end_headers() if self.path == "/status": status = {} for sv in ServiceManager.servicemodules: name = sv.name() status[name] = get_status(name) response['response'] = status elif self.path == "/newdata": response['response'] = 'Could not queue data' response['statuscode'] = 500 if queue_data(self.servicehandler): response['response'] = 'Data successfully queued' response['statuscode'] = 200 elif self.path == "/": response['response'] = 'Ingestion running' response['statuscode'] = 200 else: response['response'] = "error" response['statuscode'] = 404 self.wfile.write(json.dumps(response)) class ThreadedHTTPServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): """ Threaded HTTP Server """ allow_reuse_address = True def shutdown(self): """ Shuts down the HTTP Server """ self.socket.close() BaseHTTPServer.HTTPServer.shutdown(self) class Servicehandler(): """ Service handler deals with the threaded nature of the application """ server = None server_thread = None manager = None # Instance _instance = None def __new__(cls, *args, **kwargs): if not cls._instance: cls._instance = super(Servicehandler, cls).__new__(cls, *args, **kwargs) return cls._instance def __init__(self): self.manager = ServiceManager() #@todo remove this #remove_all_data() self.manager.load_services() print "FINISHED LOADING SERVICES" self.setup_webserver() def setup_webserver(self): """ Creates and starts the web server """ RequestHandler.servicehandler = self server_address = ('0.0.0.0', ServerPort) RequestHandler.protocol_version = Protocol self.server 
= ThreadedHTTPServer(server_address, RequestHandler) log("Starting Web Server") self.server_thread = threading.Thread(target=self.server.serve_forever) self.server_thread.daemon = True self.server_thread.start() #@todo remove this queue_data(self) print "SLEEPING NOW" self.sleepmainthread() def sleepmainthread(self): """ Sleeps the main thread """ while True and base_service.ALIVE: sleep(2) print "Exiting ingestor"
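A hypothetical client for the JSON endpoints above, written for Python 2 to match the service's vintage (the server listens on ServerPort, 8850):

import json
import urllib2

# /status returns a per-service status dictionary, /newdata queues ingestion
response = urllib2.urlopen('http://localhost:8850/status')
status = json.loads(response.read())
print status['response']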
adb_profile_chrome.py
#!/usr/bin/env python # # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import gzip import logging import optparse import os import re import select import shutil import sys import threading import time import webbrowser import zipfile import zlib from pylib import android_commands from pylib import cmd_helper from pylib import constants from pylib import pexpect _TRACE_VIEWER_ROOT = os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'trace-viewer') sys.path.append(_TRACE_VIEWER_ROOT) from trace_viewer.build import trace2html # pylint: disable=F0401 _DEFAULT_CHROME_CATEGORIES = '_DEFAULT_CHROME_CATEGORIES' def _GetTraceTimestamp(): return time.strftime('%Y-%m-%d-%H%M%S', time.localtime()) class ChromeTracingController(object): def __init__(self, adb, package_info, categories, ring_buffer): self._adb = adb self._package_info = package_info self._categories = categories self._ring_buffer = ring_buffer self._trace_file = None self._trace_interval = None self._trace_start_re = \ re.compile(r'Logging performance trace to file: (.*)') self._trace_finish_re = \ re.compile(r'Profiler finished[.] Results are in (.*)[.]') self._adb.StartMonitoringLogcat(clear=False) def __str__(self): return 'chrome trace' def StartTracing(self, interval): self._trace_interval = interval self._adb.SyncLogCat() self._adb.BroadcastIntent(self._package_info.package, 'GPU_PROFILER_START', '-e categories "%s"' % ','.join(self._categories), '-e continuous' if self._ring_buffer else '') # Chrome logs two different messages related to tracing: # # 1. "Logging performance trace to file [...]" # 2. "Profiler finished. Results are in [...]" # # The first one is printed when tracing starts and the second one indicates # that the trace file is ready to be pulled. try: self._trace_file = self._adb.WaitForLogMatch(self._trace_start_re, None, timeout=5).group(1) except pexpect.TIMEOUT: raise RuntimeError('Trace start marker not found. Is the correct version ' 'of the browser running?') def StopTracing(self): if not self._trace_file: return self._adb.BroadcastIntent(self._package_info.package, 'GPU_PROFILER_STOP') self._adb.WaitForLogMatch(self._trace_finish_re, None, timeout=120) def PullTrace(self): # Wait a bit for the browser to finish writing the trace file. time.sleep(self._trace_interval / 4 + 1) trace_file = self._trace_file.replace('/storage/emulated/0/', '/sdcard/') host_file = os.path.join(os.path.curdir, os.path.basename(trace_file)) self._adb.PullFileFromDevice(trace_file, host_file) return host_file _SYSTRACE_OPTIONS = [ # Compress the trace before sending it over USB. '-z', # Use a large trace buffer to increase the polling interval. '-b', '16384' ] # Interval in seconds for sampling systrace data. 
_SYSTRACE_INTERVAL = 15 class SystraceController(object): def __init__(self, adb, categories, ring_buffer): self._adb = adb self._categories = categories self._ring_buffer = ring_buffer self._done = threading.Event() self._thread = None self._trace_data = None def __str__(self): return 'systrace' @staticmethod def GetCategories(adb): return adb.RunShellCommand('atrace --list_categories') def StartTracing(self, _): self._thread = threading.Thread(target=self._CollectData) self._thread.start() def StopTracing(self): self._done.set() def PullTrace(self): self._thread.join() self._thread = None if self._trace_data: output_name = 'systrace-%s' % _GetTraceTimestamp() with open(output_name, 'w') as out: out.write(self._trace_data) return output_name def _RunATraceCommand(self, command): # We use a separate interface to adb because the one from AndroidCommands # isn't re-entrant. device = ['-s', self._adb.GetDevice()] if self._adb.GetDevice() else [] cmd = ['adb'] + device + ['shell', 'atrace', '--%s' % command] + \ _SYSTRACE_OPTIONS + self._categories return cmd_helper.GetCmdOutput(cmd) def _CollectData(self): trace_data = [] self._RunATraceCommand('async_start') try: while not self._done.is_set(): self._done.wait(_SYSTRACE_INTERVAL) if not self._ring_buffer or self._done.is_set(): trace_data.append( self._DecodeTraceData(self._RunATraceCommand('async_dump'))) finally: trace_data.append( self._DecodeTraceData(self._RunATraceCommand('async_stop'))) self._trace_data = ''.join([zlib.decompress(d) for d in trace_data]) @staticmethod def _DecodeTraceData(trace_data): try: trace_start = trace_data.index('TRACE:') except ValueError: raise RuntimeError('Systrace start marker not found') trace_data = trace_data[trace_start + 6:] # Collapse CRLFs that are added by adb shell. if trace_data.startswith('\r\n'): trace_data = trace_data.replace('\r\n', '\n') # Skip the initial newline. return trace_data[1:] def _GetSupportedBrowsers(): # Add aliases for backwards compatibility. 
supported_browsers = { 'stable': constants.PACKAGE_INFO['chrome_stable'], 'beta': constants.PACKAGE_INFO['chrome_beta'], 'dev': constants.PACKAGE_INFO['chrome_dev'], 'build': constants.PACKAGE_INFO['chrome'], } supported_browsers.update(constants.PACKAGE_INFO) unsupported_browsers = ['content_browsertests', 'gtest', 'legacy_browser'] for browser in unsupported_browsers: del supported_browsers[browser] return supported_browsers def _CompressFile(host_file, output): with gzip.open(output, 'wb') as out: with open(host_file, 'rb') as input_file: out.write(input_file.read()) os.unlink(host_file) def _ArchiveFiles(host_files, output): with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED) as z: for host_file in host_files: z.write(host_file) os.unlink(host_file) def _PackageTracesAsHtml(trace_files, html_file): with open(html_file, 'w') as f: trace2html.WriteHTMLForTracesToFile(trace_files, f) for trace_file in trace_files: os.unlink(trace_file) def _PrintMessage(heading, eol='\n'): sys.stdout.write('%s%s' % (heading, eol)) sys.stdout.flush() def _WaitForEnter(timeout): select.select([sys.stdin], [], [], timeout) def _StartTracing(controllers, interval): for controller in controllers: controller.StartTracing(interval) def _StopTracing(controllers): for controller in controllers: controller.StopTracing() def _PullTraces(controllers, output, compress, write_json): _PrintMessage('Downloading...', eol='') trace_files = [] for controller in controllers: trace_files.append(controller.PullTrace()) if not write_json: html_file = os.path.splitext(trace_files[0])[0] + '.html' _PackageTracesAsHtml(trace_files, html_file) trace_files = [html_file] if compress and len(trace_files) == 1: result = output or trace_files[0] + '.gz' _CompressFile(trace_files[0], result) elif len(trace_files) > 1: result = output or 'chrome-combined-trace-%s.zip' % _GetTraceTimestamp() _ArchiveFiles(trace_files, result) elif output: result = output shutil.move(trace_files[0], result) else: result = trace_files[0] _PrintMessage('done') _PrintMessage('Trace written to file://%s' % os.path.abspath(result)) return result def _CaptureAndPullTrace(controllers, interval, output, compress, write_json): trace_type = ' + '.join(map(str, controllers)) try: _StartTracing(controllers, interval) if interval: _PrintMessage('Capturing %d-second %s. Press Enter to stop early...' % \ (interval, trace_type), eol='') _WaitForEnter(interval) else: _PrintMessage('Capturing %s. Press Enter to stop...' % trace_type, eol='') raw_input() finally: _StopTracing(controllers) if interval: _PrintMessage('done') return _PullTraces(controllers, output, compress, write_json) def _ComputeChromeCategories(options): categories = [] if options.trace_frame_viewer: categories.append('disabled-by-default-cc.debug') if options.trace_ubercompositor: categories.append('disabled-by-default-cc.debug*') if options.trace_gpu: categories.append('disabled-by-default-gpu.debug*') if options.trace_flow: categories.append('disabled-by-default-toplevel.flow') if options.chrome_categories: categories += options.chrome_categories.split(',') return categories def _ComputeSystraceCategories(options): if not options.systrace_categories: return [] return options.systrace_categories.split(',') def main(): parser = optparse.OptionParser(description='Record about://tracing profiles ' 'from Android browsers. See http://dev.' 
'chromium.org/developers/how-tos/trace-event-' 'profiling-tool for detailed instructions for ' 'profiling.') timed_options = optparse.OptionGroup(parser, 'Timed tracing') timed_options.add_option('-t', '--time', help='Profile for N seconds and ' 'download the resulting trace.', metavar='N', type='float') parser.add_option_group(timed_options) cont_options = optparse.OptionGroup(parser, 'Continuous tracing') cont_options.add_option('--continuous', help='Profile continuously until ' 'stopped.', action='store_true') cont_options.add_option('--ring-buffer', help='Use the trace buffer as a ' 'ring buffer and save its contents when stopping ' 'instead of appending events into one long trace.', action='store_true') parser.add_option_group(cont_options) categories = optparse.OptionGroup(parser, 'Trace categories') categories.add_option('-c', '--categories', help='Select Chrome tracing ' 'categories with comma-delimited wildcards, ' 'e.g., "*", "cat1*,-cat1a". Omit this option to trace ' 'Chrome\'s default categories. Chrome tracing can be ' 'disabled with "--categories=\'\'".', metavar='CHROME_CATEGORIES', dest='chrome_categories', default=_DEFAULT_CHROME_CATEGORIES) categories.add_option('-s', '--systrace', help='Capture a systrace with the ' 'chosen comma-delimited systrace categories. You can ' 'also capture a combined Chrome + systrace by enabling ' 'both types of categories. Use "list" to see the ' 'available categories. Systrace is disabled by ' 'default.', metavar='SYS_CATEGORIES', dest='systrace_categories', default='') categories.add_option('--trace-cc', help='Deprecated, use --trace-frame-viewer.', action='store_true') categories.add_option('--trace-frame-viewer', help='Enable enough trace categories for ' 'compositor frame viewing.', action='store_true') categories.add_option('--trace-ubercompositor', help='Enable enough trace categories for ' 'ubercompositor frame data.', action='store_true') categories.add_option('--trace-gpu', help='Enable extra trace categories for ' 'GPU data.', action='store_true') categories.add_option('--trace-flow', help='Enable extra trace categories ' 'for IPC message flows.', action='store_true') parser.add_option_group(categories) output_options = optparse.OptionGroup(parser, 'Output options') output_options.add_option('-o', '--output', help='Save trace output to file.') output_options.add_option('--json', help='Save trace as raw JSON instead of ' 'HTML.', action='store_true') output_options.add_option('--view', help='Open resulting trace file in a ' 'browser.', action='store_true') parser.add_option_group(output_options) browsers = sorted(_GetSupportedBrowsers().keys()) parser.add_option('-b', '--browser', help='Select among installed browsers. ' 'One of ' + ', '.join(browsers) + ', "stable" is used by ' 'default.', type='choice', choices=browsers, default='stable') parser.add_option('-v', '--verbose', help='Verbose logging.', action='store_true') parser.add_option('-z', '--compress', help='Compress the resulting trace ' 'with gzip. ', action='store_true') options, _args = parser.parse_args() if options.trace_cc: parser.parse_error("""--trace-cc is deprecated. For basic jank busting uses, use --trace-frame-viewer For detailed study of ubercompositor, pass --trace-ubercompositor. When in doubt, just try out --trace-frame-viewer. 
""") if options.verbose: logging.getLogger().setLevel(logging.DEBUG) adb = android_commands.AndroidCommands() if options.systrace_categories in ['list', 'help']: _PrintMessage('\n'.join(SystraceController.GetCategories(adb))) return 0 if not options.time and not options.continuous: _PrintMessage('Time interval or continuous tracing should be specified.') return 1 chrome_categories = _ComputeChromeCategories(options) systrace_categories = _ComputeSystraceCategories(options) package_info = _GetSupportedBrowsers()[options.browser] if chrome_categories and 'webview' in systrace_categories: logging.warning('Using the "webview" category in systrace together with ' 'Chrome tracing results in duplicate trace events.') controllers = [] if chrome_categories: controllers.append(ChromeTracingController(adb, package_info, chrome_categories, options.ring_buffer)) if systrace_categories: controllers.append(SystraceController(adb, systrace_categories, options.ring_buffer)) if not controllers: _PrintMessage('No trace categories enabled.') return 1 if options.output: options.output = os.path.expanduser(options.output) result = _CaptureAndPullTrace(controllers, options.time if not options.continuous else 0, options.output, options.compress, options.json) if options.view: if sys.platform == 'darwin': os.system('/usr/bin/open %s' % os.path.abspath(result)) else: webbrowser.open(result) if __name__ == '__main__': sys.exit(main())
test_index.py
import pytest from base.client_base import TestcaseBase from base.index_wrapper import ApiIndexWrapper from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from common.code_mapping import CollectionErrorMessage as clem from common.code_mapping import IndexErrorMessage as iem from utils.utils import * from common.constants import * prefix = "index" default_schema = cf.gen_default_collection_schema() default_field_name = ct.default_float_vec_field_name default_index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": {"nlist": 64}} # copied from pymilvus uid = "test_index" BUILD_TIMEOUT = 300 field_name = default_float_vec_field_name binary_field_name = default_binary_vec_field_name query, query_vecs = gen_query_vectors(field_name, default_entities, default_top_k, 1) default_index = {"index_type": "IVF_FLAT", "params": {"nlist": 128}, "metric_type": "L2"} class TestIndexParams(TestcaseBase): """ Test case of index interface """ @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("collection", [None, "coll"]) def test_index_non_collection(self, collection): """ target: test index with None collection method: input none collection object expected: raise exception """ self._connect() self.index_wrap.init_index(collection, default_field_name, default_index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 0, ct.err_msg: clem.CollectionType}) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("field_name", ct.get_invalid_strs) def test_index_field_name_invalid(self, field_name): """ target: test index with error field name method: input field name expected: raise exception """ collection_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=collection_name) log.error(iem.WrongFieldName % str(field_name)) self.index_wrap.init_index(collection_w.collection, field_name, default_index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: iem.WrongFieldName % str(field_name)}) @pytest.mark.tags(CaseLabel.L1) def test_index_field_name_not_existed(self): """ target: test index with error field name method: input field name not created expected: raise exception """ c_name = cf.gen_unique_str(prefix) f_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) self.index_wrap.init_index(collection_w.collection, f_name, default_index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: f"cannot create index on non-existed field: {f_name}"}) @pytest.mark.tags(CaseLabel.L0) # TODO (reason="pymilvus issue #677", raises=TypeError) @pytest.mark.parametrize("index_type", ct.get_invalid_strs) def test_index_type_invalid(self, index_type): """ target: test index with error index type method: input invalid index type expected: raise exception """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) index_params = copy.deepcopy(default_index_params) index_params["index_type"] = index_type if not isinstance(index_params["index_type"], str): msg = "must be str" else: msg = "Invalid index_type" self.index_wrap.init_index(collection_w.collection, default_field_name, index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: msg}) @pytest.mark.tags(CaseLabel.L1) def test_index_type_not_supported(self): """ target: test index with error index type method: input unsupported index type expected: raise 
exception """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) index_params = copy.deepcopy(default_index_params) index_params["index_type"] = "IVFFFFFFF" self.index_wrap.init_index(collection_w.collection, default_field_name, index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: ""}) @pytest.mark.tags(CaseLabel.L1) def test_index_params_invalid(self, get_invalid_index_params): """ target: test index with error index params method: input invalid index params expected: raise exception """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) index_params = get_invalid_index_params self.index_wrap.init_index(collection_w.collection, default_field_name, index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: ""}) # TODO: not supported @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason='not supported') def test_index_name_invalid(self, get_invalid_index_name): """ target: test index with error index name method: input invalid index name expected: raise exception """ c_name = cf.gen_unique_str(prefix) index_name = get_invalid_index_name collection_w = self.init_collection_wrap(name=c_name) self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: ""}) class TestIndexOperation(TestcaseBase): """ Test case of index interface """ @pytest.mark.tags(CaseLabel.L1) def test_index_collection_empty(self): """ target: test index with empty collection method: Index on empty collection expected: no exception raised """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) index, _ = self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params) # TODO: assert index cf.assert_equal_index(index, collection_w.collection.indexes[0]) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("index_param", [default_index_params]) def test_index_params(self, index_param): """ target: test index with all index type/params method: input valid params expected: no exception raised """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) data = cf.gen_default_list_data(ct.default_nb) collection_w.insert(data=data) index_params = index_param index, _ = self.index_wrap.init_index(collection_w.collection, default_field_name, index_params) # TODO: assert index cf.assert_equal_index(index, collection_w.collection.indexes[0]) @pytest.mark.tags(CaseLabel.L1) def test_index_params_flush(self): """ target: test index with all index type/params method: input valid params expected: no exception raised """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) data = cf.gen_default_list_data(ct.default_nb) collection_w.insert(data=data) self._connect().flush([collection_w.name]) index, _ = self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params) # TODO: assert index cf.assert_equal_index(index, collection_w.collection.indexes[0]) assert collection_w.num_entities == ct.default_nb # TODO: not support @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason='not supported') def test_index_name_dup(self): """ target: test index with duplicate index name method: create index with existed index name create by `collection.create_index` expected: no exception raised """ c_name = cf.gen_unique_str(prefix) index_name = 
ct.default_index_name collection_w = self.init_collection_wrap(name=c_name) collection_w.collection.create_index(default_field_name, default_index_params, index_name=index_name) self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params, check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: ""}) # TODO: server not supported @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason='not supported') def test_index_field_names(self): """ target: test index on one field, with two indexes method: create index with two different indexes expected: no exception raised """ pass # TODO: server not supported @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason='not supported') def test_index_fields(self): """ target: test index on two fields, with the same name method: create the same index name with two different fields expected: exception raised """ pass # TODO: server not supported @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason='not supported') def test_index_fields_B(self): """ target: test index on two fields, with the different name method: create the different index with two different fields expected: no exception raised """ pass # TODO: server not supported @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason='not supported') def test_index_field_names_eq_maximum(self): """ target: test index on one field, with the different names, num of the names equal to the maximum num supported method: create the different indexes expected: no exception raised """ pass # TODO: server not supported @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason='not supported') def test_index_field_names_more_maximum(self): """ target: test index on one field, with the different names, num of the names more than the maximum num supported method: create the different indexes expected: exception raised """ pass @pytest.mark.tags(CaseLabel.L1) def test_index_drop_index(self): """ target: test index.drop method: create index by `index`, and then drop it expected: no exception raised """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) index, _ = self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params) cf.assert_equal_index(index, collection_w.collection.indexes[0]) self.index_wrap.drop() assert len(collection_w.collection.indexes) == 0 @pytest.mark.tags(CaseLabel.L1) # TODO #7372 def test_index_drop_repeatedly(self): """ target: test index.drop method: create index by `index`, and then drop it twice expected: exception raised """ c_name = cf.gen_unique_str(prefix) collection_w = self.init_collection_wrap(name=c_name) _, _ = self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params) self.index_wrap.drop() self.index_wrap.drop(check_task=CheckTasks.err_res, check_items={ct.err_code: 1, ct.err_msg: "Index doesn't exist"}) class TestIndexAdvanced(TestcaseBase): """ Test case of index interface """ @pytest.mark.tags(CaseLabel.L2) def test_index_drop_multi_collections(self): """ target: test index.drop method: create indexes by `index`, and then drop it, assert there is one index left expected: exception raised """ c_name = cf.gen_unique_str(prefix) c_name_2 = cf.gen_unique_str(prefix) cw = self.init_collection_wrap(name=c_name) cw2 = self.init_collection_wrap(name=c_name_2) iw_2 = ApiIndexWrapper() self.index_wrap.init_index(cw.collection, default_field_name, default_index_params) index_2, _ = iw_2.init_index(cw2.collection, 
default_field_name, default_index_params) self.index_wrap.drop() assert cf.assert_equal_index(index_2, cw2.collection.indexes[0]) assert len(cw.collection.indexes) == 0 @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason='TODO') def test_index_drop_during_inserting(self): """ target: test index.drop during inserting method: create indexes by `index`, and then drop it during inserting entities, make sure async insert expected: no exception raised, insert success """ pass @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason='TODO') def test_index_drop_during_searching(self): """ target: test index.drop during searching method: create indexes by `index`, and then drop it during searching, make sure async search expected: no exception raised, search success """ pass @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason='TODO') def test_index_recovery_after_restart(self): """ target: test index still existed after server restart method: create index by `index`, and then restart server, assert index existed expected: index in collection.indexes """ pass @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason='TODO') def test_index_building_after_restart(self): """ target: index can still build if not finished before server restart method: create index by `index`, and then restart server, assert server is indexing expected: index build finished after server restart """ pass """ ****************************************************************** The following classes are copied from pymilvus test ****************************************************************** """ class TestIndexBase: @pytest.fixture( scope="function", params=gen_simple_index() ) def get_simple_index(self, request, connect): logging.getLogger().info(request.param) # if str(connect._cmd("mode")) == "CPU": # if request.param["index_type"] in index_cpu_not_support(): # pytest.skip("sq8h not support in CPU mode") return copy.deepcopy(request.param) @pytest.fixture( scope="function", params=[ 1, 10, 1111 ], ) def get_nq(self, request): yield request.param """ ****************************************************************** The following cases are used to test `create_index` function ****************************************************************** """ @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success """ result = connect.insert(collection, default_entities) connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L0) @pytest.mark.skip(reason="Repeat with test_index_field_name_not_existed") def test_create_index_on_field_not_existed(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add entities in it, create index on field not existed expected: error raised """ tmp_field_name = gen_unique_str() result = connect.insert(collection, default_entities) with pytest.raises(Exception) as e: connect.create_index(collection, tmp_field_name, get_simple_index) @pytest.mark.tags(CaseLabel.L2) def test_create_index_on_field(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add 
entities in it, create index on other field expected: error raised """ tmp_field_name = "int64" result = connect.insert(collection, default_entities) with pytest.raises(Exception) as e: connect.create_index(collection, tmp_field_name, get_simple_index) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_no_vectors(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success """ connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_partition(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection, create partition, and add entities in it, create index expected: return search success """ connect.create_partition(collection, default_tag) result = connect.insert(collection, default_entities, partition_name=default_tag) connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_partition_flush(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection, create partition, and add entities in it, create index expected: return search success """ connect.create_partition(collection, default_tag) result = connect.insert(collection, default_entities, partition_name=default_tag) connect.flush([collection]) connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L2) def test_create_index_without_connect(self, dis_connect, collection): """ target: test create index without connection method: create collection and add entities in it, check if added successfully expected: raise exception """ with pytest.raises(Exception) as e: dis_connect.create_index(collection, field_name, get_simple_index) @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_search_with_query_vectors(self, connect, collection, get_simple_index, get_nq): """ target: test create index interface, search with more query vectors method: create collection and add entities in it, create index expected: return search success """ result = connect.insert(collection, default_entities) connect.flush([collection]) connect.create_index(collection, field_name, get_simple_index) logging.getLogger().info(connect.describe_index(collection, "")) nq = get_nq index_type = get_simple_index["index_type"] search_param = get_search_param(index_type) query, vecs = gen_query_vectors(field_name, default_entities, default_top_k, nq, search_params=search_param) connect.load_collection(collection) res = connect.search(collection, query) assert len(res) == nq @pytest.mark.timeout(BUILD_TIMEOUT) @pytest.mark.tags(CaseLabel.L2) def test_create_index_multithread(self, connect, collection, args): """ target: test create index interface 
with multiprocess method: create collection and add entities in it, create index expected: return search success """ connect.insert(collection, default_entities) def build(connect): connect.create_index(collection, field_name, default_index) if default_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(default_index, field_name) assert index == default_index threads_num = 8 threads = [] for i in range(threads_num): m = get_milvus(host=args["ip"], port=args["port"], handler=args["handler"]) t = MyThread(target=build, args=(m,)) threads.append(t) t.start() time.sleep(0.2) for t in threads: t.join() @pytest.mark.tags(CaseLabel.L0) def test_create_index_collection_not_existed(self, connect): """ target: test create index interface when collection name not existed method: create collection and add entities in it, create index , make sure the collection name not in index expected: create index failed """ collection_name = gen_unique_str(uid) with pytest.raises(Exception) as e: connect.create_index(collection_name, field_name, default_index) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_insert_flush(self, connect, collection, get_simple_index): """ target: test create index method: create collection and create index, add entities in it expected: create index ok, and count correct """ connect.create_index(collection, field_name, get_simple_index) result = connect.insert(collection, default_entities) connect.flush([collection]) stats = connect.get_collection_stats(collection) assert stats["row_count"] == default_nb if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_same_index_repeatedly(self, connect, collection, get_simple_index): """ target: check if index can be created repeatedly, with the same create_index params method: create index after index have been built expected: return code success, and search ok """ connect.create_index(collection, field_name, get_simple_index) connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_different_index_repeatedly(self, connect, collection): """ target: check if index can be created repeatedly, with the different create_index params method: create another index with different index_params after index have been built expected: return code 0, and describe index result equals with the second index params """ result = connect.insert(collection, default_entities) connect.flush([collection]) indexs = [default_index, {"metric_type":"L2", "index_type": "FLAT", "params":{"nlist": 1024}}] for index in indexs: connect.create_index(collection, field_name, index) connect.release_collection(collection) connect.load_collection(collection) index = connect.describe_index(collection, "") # assert index == indexs[-1] assert not index # FLAT is the last index_type, drop all indexes in server @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_different_index_repeatedly_B(self, connect, collection): """ target: check if index can be created repeatedly, with the different create_index params 
method: create another index with different index_params after index have been built expected: return code 0, and describe index result equals with the second index params """ result = connect.insert(collection, default_entities) connect.flush([collection]) indexs = [default_index, {"metric_type": "L2", "index_type": "IVF_SQ8", "params": {"nlist": 1024}}] for index in indexs: connect.create_index(collection, field_name, index) connect.release_collection(collection) connect.load_collection(collection) index = connect.describe_index(collection, "") create_target_index(indexs[-1], field_name) assert index == indexs[-1] # assert not index # FLAT is the last index_type, drop all indexes in server @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_ip(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success """ result = connect.insert(collection, default_entities) get_simple_index["metric_type"] = "IP" connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_no_vectors_ip(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success """ get_simple_index["metric_type"] = "IP" connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_partition_ip(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection, create partition, and add entities in it, create index expected: return search success """ connect.create_partition(collection, default_tag) result = connect.insert(collection, default_entities, partition_name=default_tag) get_simple_index["metric_type"] = "IP" connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_partition_flush_ip(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection, create partition, and add entities in it, create index expected: return search success """ connect.create_partition(collection, default_tag) result = connect.insert(collection, default_entities, partition_name=default_tag) connect.flush([collection]) get_simple_index["metric_type"] = "IP" connect.create_index(collection, field_name, get_simple_index) if get_simple_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(get_simple_index, field_name) assert index == get_simple_index @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_search_with_query_vectors_ip(self, connect, collection, get_simple_index, get_nq): """ target: test create index 
interface, search with more query vectors method: create collection and add entities in it, create index expected: return search success """ metric_type = "IP" result = connect.insert(collection, default_entities) connect.flush([collection]) get_simple_index["metric_type"] = metric_type connect.create_index(collection, field_name, get_simple_index) connect.load_collection(collection) logging.getLogger().info(connect.describe_index(collection, "")) nq = get_nq index_type = get_simple_index["index_type"] search_param = get_search_param(index_type) query, vecs = gen_query_vectors(field_name, default_entities, default_top_k, nq, metric_type=metric_type, search_params=search_param) res = connect.search(collection, query) assert len(res) == nq @pytest.mark.timeout(BUILD_TIMEOUT) @pytest.mark.tags(CaseLabel.L2) def test_create_index_multithread_ip(self, connect, collection, args): """ target: test create index interface with multiprocess method: create collection and add entities in it, create index expected: return search success """ connect.insert(collection, default_entities) def build(connect): default_index["metric_type"] = "IP" connect.create_index(collection, field_name, default_index) if default_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(default_index, field_name) assert index == default_index threads_num = 8 threads = [] for i in range(threads_num): m = get_milvus(host=args["ip"], port=args["port"], handler=args["handler"]) t = MyThread(target=build, args=(m,)) threads.append(t) t.start() time.sleep(0.2) for t in threads: t.join() @pytest.mark.tags(CaseLabel.L2) def test_create_index_collection_not_existed_ip(self, connect, collection): """ target: test create index interface when collection name not existed method: create collection and add entities in it, create index , make sure the collection name not in index expected: return code not equals to 0, create index failed """ collection_name = gen_unique_str(uid) default_index["metric_type"] = "IP" with pytest.raises(Exception) as e: connect.create_index(collection_name, field_name, default_index) @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_no_vectors_insert_ip(self, connect, collection): """ target: test create index interface when there is no vectors in collection, and does not affect the subsequent process method: create collection and add no vectors in it, and then create index, add entities in it expected: return code equals to 0 """ default_index["metric_type"] = "IP" connect.create_index(collection, field_name, default_index) result = connect.insert(collection, default_entities) connect.flush([collection]) stats = connect.get_collection_stats(collection) assert stats["row_count"] == default_nb if default_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") create_target_index(default_index, field_name) assert index == default_index @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_same_index_repeatedly_ip(self, connect, collection): """ target: check if index can be created repeatedly, with the same create_index params method: create index after index have been built expected: return code success, and search ok """ default_index["metric_type"] = "IP" connect.create_index(collection, field_name, default_index) connect.create_index(collection, field_name, default_index) if default_index["index_type"] != "FLAT": index = connect.describe_index(collection, "") 
create_target_index(default_index, field_name) assert index == default_index @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_different_index_repeatedly_ip(self, connect, collection): """ target: check if index can be created repeatedly, with the different create_index params method: create another index with different index_params after index have been built expected: return code 0, and describe index result equals with the second index params """ result = connect.insert(collection, default_entities) connect.flush([collection]) connect.load_collection(collection) stats = connect.get_collection_stats(collection) assert stats["row_count"] == default_nb default_index["metric_type"] = "IP" indexs = [default_index, {"index_type": "FLAT", "params": {"nlist": 1024}, "metric_type": "IP"}] for index in indexs: connect.create_index(collection, field_name, index) connect.release_collection(collection) connect.load_collection(collection) index = connect.describe_index(collection, "") # assert index == indexs[-1] assert not index """ ****************************************************************** The following cases are used to test `drop_index` function ****************************************************************** """ @pytest.mark.tags(CaseLabel.L0) def test_drop_index(self, connect, collection, get_simple_index): """ target: test drop index interface method: create collection and add entities in it, create index, call drop index expected: return code 0, and default index param """ # result = connect.insert(collection, entities) connect.create_index(collection, field_name, get_simple_index) connect.drop_index(collection, field_name) index = connect.describe_index(collection, "") assert not index @pytest.mark.tags(CaseLabel.L2) # TODO #7372 def test_drop_index_repeatedly(self, connect, collection, get_simple_index): """ target: test drop index repeatedly method: create index, call drop index, and drop again expected: return code 0 """ connect.create_index(collection, field_name, get_simple_index) connect.drop_index(collection, field_name) connect.drop_index(collection, field_name) index = connect.describe_index(collection, "") assert not index @pytest.mark.tags(CaseLabel.L2) def test_drop_index_without_connect(self, dis_connect, collection): """ target: test drop index without connection method: drop index, and check if drop successfully expected: raise exception """ with pytest.raises(Exception) as e: dis_connect.drop_index(collection, field_name) @pytest.mark.tags(CaseLabel.L0) def test_drop_index_collection_not_existed(self, connect): """ target: test drop index interface when collection name not existed method: create collection and add entities in it, create index , make sure the collection name not in index, and then drop it expected: return code not equals to 0, drop index failed """ collection_name = gen_unique_str(uid) with pytest.raises(Exception) as e: connect.drop_index(collection_name, field_name) @pytest.mark.tags(CaseLabel.L0) def test_drop_index_collection_not_create(self, connect, collection): """ target: test drop index interface when index not created method: create collection and add entities in it, create index expected: return code not equals to 0, drop index failed """ # no create index connect.drop_index(collection, field_name) @pytest.mark.tags(CaseLabel.L2) def test_create_drop_index_repeatedly(self, connect, collection, get_simple_index): """ target: test create / drop index repeatedly, use the same index params method: create 
index, drop index, four times expected: return code 0 """ for i in range(4): connect.create_index(collection, field_name, get_simple_index) connect.drop_index(collection, field_name) @pytest.mark.tags(CaseLabel.L2) def test_drop_index_ip(self, connect, collection, get_simple_index): """ target: test drop index interface method: create collection and add entities in it, create index, call drop index expected: return code 0, and default index param """ # result = connect.insert(collection, entities) get_simple_index["metric_type"] = "IP" connect.create_index(collection, field_name, get_simple_index) connect.drop_index(collection, field_name) index = connect.describe_index(collection, "") assert not index @pytest.mark.tags(CaseLabel.L2) def test_drop_index_repeatedly_ip(self, connect, collection, get_simple_index): """ target: test drop index repeatedly method: create index, call drop index, and drop again expected: return code 0 """ get_simple_index["metric_type"] = "IP" connect.create_index(collection, field_name, get_simple_index) connect.drop_index(collection, field_name) connect.drop_index(collection, field_name) index = connect.describe_index(collection, "") assert not index @pytest.mark.tags(CaseLabel.L2) def test_drop_index_without_connect_ip(self, dis_connect, collection): """ target: test drop index without connection method: drop index, and check if drop successfully expected: raise exception """ with pytest.raises(Exception) as e: dis_connect.drop_index(collection, field_name) @pytest.mark.tags(CaseLabel.L2) def test_drop_index_collection_not_create_ip(self, connect, collection): """ target: test drop index interface when index not created method: create collection and add entities in it, create index expected: return code not equals to 0, drop index failed """ # result = connect.insert(collection, entities) # no create index connect.drop_index(collection, field_name) @pytest.mark.tags(CaseLabel.L2) def test_create_drop_index_repeatedly_ip(self, connect, collection, get_simple_index): """ target: test create / drop index repeatedly, use the same index params method: create index, drop index, four times expected: return code 0 """ get_simple_index["metric_type"] = "IP" for i in range(4): connect.create_index(collection, field_name, get_simple_index) connect.drop_index(collection, field_name) @pytest.mark.tags(CaseLabel.L0) def test_create_PQ_without_nbits(self, connect, collection): """ target: test create PQ index method: create PQ index without nbits expected: create successfully """ PQ_index = {"index_type": "IVF_PQ", "params": {"nlist": 128, "m": 16}, "metric_type": "L2"} result = connect.insert(collection, default_entities) connect.create_index(collection, field_name, PQ_index) index = connect.describe_index(collection, "") create_target_index(PQ_index, field_name) assert index == PQ_index class TestIndexBinary: @pytest.fixture( scope="function", params=gen_simple_index() ) def get_simple_index(self, request, connect): # if str(connect._cmd("mode")) == "CPU": # if request.param["index_type"] in index_cpu_not_support(): # pytest.skip("sq8h not support in CPU mode") return copy.deepcopy(request.param) @pytest.fixture( scope="function", params=gen_binary_index() ) def get_jaccard_index(self, request, connect): if request.param["index_type"] in binary_support(): request.param["metric_type"] = "JACCARD" return request.param else: pytest.skip("Skip index") @pytest.fixture( scope="function", params=gen_binary_index() ) def get_l2_index(self, request, connect): 
request.param["metric_type"] = "L2" return request.param @pytest.fixture( scope="function", params=[ 1, 10, 1111 ], ) def get_nq(self, request): yield request.param """ ****************************************************************** The following cases are used to test `create_index` function ****************************************************************** """ @pytest.mark.tags(CaseLabel.L2) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index(self, connect, binary_collection, get_jaccard_index): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success """ result = connect.insert(binary_collection, default_binary_entities) connect.create_index(binary_collection, binary_field_name, get_jaccard_index) binary_index = connect.describe_index(binary_collection, "") create_target_index(get_jaccard_index, binary_field_name) assert binary_index == get_jaccard_index @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_partition(self, connect, binary_collection, get_jaccard_index): """ target: test create index interface method: create collection, create partition, and add entities in it, create index expected: return search success """ connect.create_partition(binary_collection, default_tag) result = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag) connect.create_index(binary_collection, binary_field_name, get_jaccard_index) binary_index = connect.describe_index(binary_collection, "") create_target_index(get_jaccard_index, binary_field_name) assert binary_index == get_jaccard_index @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_search_with_query_vectors(self, connect, binary_collection, get_jaccard_index, get_nq): """ target: test create index interface, search with more query vectors method: create collection and add entities in it, create index expected: return search success """ nq = get_nq result = connect.insert(binary_collection, default_binary_entities) connect.flush([binary_collection]) connect.create_index(binary_collection, binary_field_name, get_jaccard_index) connect.load_collection(binary_collection) query, vecs = gen_query_vectors(binary_field_name, default_binary_entities, default_top_k, nq, metric_type="JACCARD") search_param = get_search_param(get_jaccard_index["index_type"], metric_type="JACCARD") logging.getLogger().info(search_param) res = connect.search(binary_collection, query, search_params=search_param) assert len(res) == nq @pytest.mark.timeout(BUILD_TIMEOUT) @pytest.mark.tags(CaseLabel.L2) def test_create_index_invalid_metric_type_binary(self, connect, binary_collection, get_l2_index): """ target: test create index interface with invalid metric type method: add entitys into binary connection, flash, create index with L2 metric type. 
expected: return create_index failure """ # insert 6000 vectors result = connect.insert(binary_collection, default_binary_entities) connect.flush([binary_collection]) with pytest.raises(Exception) as e: res = connect.create_index(binary_collection, binary_field_name, get_l2_index) """ ****************************************************************** The following cases are used to test `describe_index` function *************************************************************** """ @pytest.mark.skip("repeat with test_create_index binary") def _test_get_index_info(self, connect, binary_collection, get_jaccard_index): """ target: test describe index interface method: create collection and add entities in it, create index, call describe index expected: return code 0, and index instructure """ result = connect.insert(binary_collection, default_binary_entities) connect.flush([binary_collection]) connect.create_index(binary_collection, binary_field_name, get_jaccard_index) stats = connect.get_collection_stats(binary_collection) assert stats["row_count"] == default_nb for partition in stats["partitions"]: segments = partition["segments"] if segments: for segment in segments: for file in segment["files"]: if "index_type" in file: assert file["index_type"] == get_jaccard_index["index_type"] @pytest.mark.skip("repeat with test_create_index_partition binary") def _test_get_index_info_partition(self, connect, binary_collection, get_jaccard_index): """ target: test describe index interface method: create collection, create partition and add entities in it, create index, call describe index expected: return code 0, and index instructure """ connect.create_partition(binary_collection, default_tag) result = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag) connect.flush([binary_collection]) connect.create_index(binary_collection, binary_field_name, get_jaccard_index) stats = connect.get_collection_stats(binary_collection) logging.getLogger().info(stats) assert stats["row_count"] == default_nb assert len(stats["partitions"]) == 2 for partition in stats["partitions"]: segments = partition["segments"] if segments: for segment in segments: for file in segment["files"]: if "index_type" in file: assert file["index_type"] == get_jaccard_index["index_type"] """ ****************************************************************** The following cases are used to test `drop_index` function ****************************************************************** """ @pytest.mark.tags(CaseLabel.L2) def test_drop_index(self, connect, binary_collection, get_jaccard_index): """ target: test drop index interface method: create collection and add entities in it, create index, call drop index expected: return code 0, and default index param """ connect.create_index(binary_collection, binary_field_name, get_jaccard_index) stats = connect.get_collection_stats(binary_collection) logging.getLogger().info(stats) connect.drop_index(binary_collection, binary_field_name) binary_index = connect.describe_index(binary_collection, "") assert not binary_index @pytest.mark.tags(CaseLabel.L0) def test_drop_index_partition(self, connect, binary_collection, get_jaccard_index): """ target: test drop index interface method: create collection, create partition and add entities in it, create index on collection, call drop collection index expected: return code 0, and default index param """ connect.create_partition(binary_collection, default_tag) result = connect.insert(binary_collection, default_binary_entities, 
partition_name=default_tag) connect.flush([binary_collection]) connect.create_index(binary_collection, binary_field_name, get_jaccard_index) connect.drop_index(binary_collection, binary_field_name) binary_index = connect.describe_index(binary_collection, "") assert not binary_index class TestIndexInvalid(object): """ Test create / describe / drop index interfaces with invalid collection names """ @pytest.fixture( scope="function", params=gen_invalid_strs() ) def get_collection_name(self, request): yield request.param @pytest.mark.tags(CaseLabel.L0) def test_create_index_with_invalid_collection_name(self, connect, get_collection_name): """ target: test create index interface for invalid scenario method: create index with invalid collection name expected: raise exception """ collection_name = get_collection_name with pytest.raises(Exception) as e: connect.create_index(collection_name, field_name, default_index) @pytest.mark.tags(CaseLabel.L2) def test_drop_index_with_invalid_collection_name(self, connect, get_collection_name): """ target: test drop index interface for invalid scenario method: drop index with invalid collection name expected: raise exception """ collection_name = get_collection_name with pytest.raises(Exception) as e: connect.drop_index(collection_name) @pytest.fixture( scope="function", params=gen_invalid_index() ) def get_index(self, request): yield request.param @pytest.mark.tags(CaseLabel.L2) def test_create_index_with_invalid_index_params(self, connect, collection, get_index): """ target: test create index interface for invalid scenario method: create index with invalid index params expected: raise exception """ logging.getLogger().info(get_index) with pytest.raises(Exception) as e: connect.create_index(collection, field_name, get_index) class TestIndexAsync: @pytest.fixture(scope="function", autouse=True) def skip_http_check(self, args): if args["handler"] == "HTTP": pytest.skip("skip in http mode") """ ****************************************************************** The following cases are used to test `create_index` function ****************************************************************** """ @pytest.fixture( scope="function", params=gen_simple_index() ) def get_simple_index(self, request, connect): # if str(connect._cmd("mode")) == "CPU": # if request.param["index_type"] in index_cpu_not_support(): # pytest.skip("sq8h not support in CPU mode") return copy.deepcopy(request.param) def check_result(self, res): logging.getLogger().info("In callback check search result") logging.getLogger().info(res) """ ****************************************************************** The following cases are used to test `create_index` function ****************************************************************** """ @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success """ result = connect.insert(collection, default_entities) logging.getLogger().info("start index") future = connect.create_index(collection, field_name, get_simple_index, _async=True) logging.getLogger().info("before result") res = future.result() # TODO: logging.getLogger().info(res) @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_drop(self, connect, collection): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success 
""" result = connect.insert(collection, default_entities) connect.create_index(collection, field_name, default_index, _async=True) connect.drop_collection(collection) with pytest.raises(Exception, match=f'DescribeIndex failed, error = collection {collection} not found'): connect.describe_index(collection, "") @pytest.mark.tags(CaseLabel.L2) def test_create_index_with_invalid_collection_name(self, connect): collection_name = " " with pytest.raises(Exception) as e: future = connect.create_index(collection_name, field_name, default_index, _async=True) res = future.result() @pytest.mark.tags(CaseLabel.L0) @pytest.mark.timeout(BUILD_TIMEOUT) def test_create_index_callback(self, connect, collection, get_simple_index): """ target: test create index interface method: create collection and add entities in it, create index expected: return search success """ result = connect.insert(collection, default_entities) logging.getLogger().info("start index") future = connect.create_index(collection, field_name, get_simple_index, _async=True, _callback=self.check_result) logging.getLogger().info("before result") res = future.result() # TODO: logging.getLogger().info(res)
processy.py
try:
    from queue import Empty
except ImportError:
    from Queue import Empty

import logging
import time
from multiprocessing import Process, Queue

log = logging.getLogger(__name__)


def _wrapped_func(func, output_queue, item):
    # Module-level so it can be pickled when the "spawn" start method is used.
    try:
        func(item)
    except KeyboardInterrupt:
        raise
    except Exception as e:
        output_queue.put(e)


def processed(items, func, max_processes=5, max_queue=200, join=True,
              daemon=True):
    """
    Run a function ``func`` for each item in a generator ``items`` in a set
    number of processes, reading items from the generator no faster than the
    worker processes can consume them.

    :param items: The iterable of items to be processed.
    :param func: A function that accepts a single argument, an item from the
        iterable ``items``.
    :param max_processes: The number of worker processes to run concurrently.
    :param max_queue: Upper bound on the internal queue that workers use to
        report exceptions back to the parent process.
    :param join: If this is True, ``processed`` will wait for all processes
        to conclude; it will block until all processes are finished. If this
        is False, it returns as soon as the last worker has been started.
    :param daemon: Mark the worker processes as daemons in the operating
        system so that the program will terminate even if they are still
        running.
    """
    # A multiprocessing queue is required here: a plain ``queue.Queue`` would
    # be copied into each child process and exceptions reported by workers
    # would never reach the parent.
    output_queue = Queue(maxsize=max_queue)

    processes = []
    for item in items:
        # Report any exceptions that finished workers have sent back so far.
        try:
            error = output_queue.get_nowait()
            log.error("Worker raised an exception: %r", error)
        except Empty:
            pass
        # Wait for a free worker slot; sleeping avoids a 100% CPU spin loop.
        while sum(1 for p in processes if p.is_alive()) >= max_processes:
            time.sleep(0.01)
        process = Process(target=_wrapped_func,
                          args=(func, output_queue, item),
                          daemon=daemon)
        process.start()
        processes.append(process)

    if join:
        while any(p.is_alive() for p in processes):
            time.sleep(0.01)
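
# Usage sketch (not part of the original module): run a hypothetical per-item
# task over a generator with at most three concurrent worker processes.
def _example_task(n):
    # Stand-in for a real workload.
    print(n * n)


if __name__ == "__main__":
    processed((n for n in range(10)), _example_task, max_processes=3)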
freetests.py
#!/usr/bin/env python3
# coding: utf-8
# Copyright 2013 Abram Hindle
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# run python freetests.py

import unittest
import httpclient
import http.server
import threading
import socketserver
import random
import time
import urllib.parse
import json

BASEHOST = '127.0.0.1'
BASEPORT = 27600 + random.randint(1, 100)

httpclass = httpclient
#import mysolution
#httpclass = mysolution


# Sorry but in Python this comes out of the box!
class MyHTTPHandler(http.server.BaseHTTPRequestHandler):
    post = None
    get = None

    def do_POST(self):
        try:
            if (self.post == None):
                return None
            else:
                return self.post()
        except Exception as e:
            print("Exception %s\n" % e)
            raise e

    def do_GET(self):
        try:
            print("GET %s\n" % self.path)
            if (self.get == None):
                return None
            else:
                return self.get()
        except Exception as e:
            print("Exception %s\n" % e)
            raise e


def make_http_server(host=BASEHOST, port=BASEPORT):
    return http.server.HTTPServer((host, port), MyHTTPHandler)


# always returns 404
def nothing_available(self):
    self.send_error(404, "File not found")
    self.end_headers()
    self.wfile.write(bytes("", "utf-8"))


# repeats your path back
def echo_path_get(self):
    self.send_response(200)
    self.send_header("Content-type", "text/plain")
    self.end_headers()
    self.wfile.write(bytes("%s\n" % self.path, "utf-8"))


# repeats your post back as json
def echo_post(self):
    length = int(self.headers['Content-Length'])
    post_data = urllib.parse.parse_qs(self.rfile.read(length).decode('utf-8'))
    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()
    self.wfile.write(bytes(json.dumps(post_data), "utf-8"))


def header_check(self):
    response = 200
    errors = []
    if 'Host' not in self.headers:
        response = 400
        errors.append("No Host header found")
    self.send_response(response)
    self.send_header("Content-type", "application/json")
    self.end_headers()
    self.wfile.write(bytes(json.dumps(errors), "utf-8"))


def die_on_method(self):
    response = 405
    errors = []
    errors.append("Method Not Allowed")
    if 'Host' not in self.headers:
        errors.append("No Host header found")
    self.send_response(response)
    self.send_header("Content-type", "application/json")
    self.end_headers()
    self.wfile.write(bytes(json.dumps(errors), "utf-8"))


def post_header_check(self):
    response = 200
    errors = []
    if 'Host' not in self.headers:
        response = 400
        errors.append("No Host header found")
    if 'Content-length' not in self.headers:
        response = 400
        errors.append("No Content-Length header found")
    self.send_response(response)
    self.send_header("Content-type", "application/json")
    self.end_headers()
    self.wfile.write(bytes(json.dumps(errors), "utf-8"))


class TestHTTPClient(unittest.TestCase):
    httpd = None
    running = False

    @classmethod
    def setUpClass(self):
        '''Cache the httpd server and run it as a thread'''
        if (TestHTTPClient.httpd == None):
            try:
                self.thread = threading.Thread(target=self.run_server)
                self.thread.start()
                time.sleep(1)
            except Exception as e:
                print(e)
                print("setUP: Thread died")
                raise e

    @classmethod
    def run_server(self):
        '''run the httpd server in a thread'''
        try:
            socketserver.TCPServer.allow_reuse_address = True
            http.server.HTTPServer.allow_reuse_address = True
            TestHTTPClient.httpd = make_http_server()
            print("HTTP UP!\n")
            TestHTTPClient.httpd.serve_forever()
            print("HTTP has been shutdown!\n")
        except Exception as e:
            print(e)
            print("run_server: Thread died")

    def test404GET(self):
        '''Test against 404 errors'''
        print("Testing test404GET!")
        MyHTTPHandler.get = nothing_available
        http = httpclass.HTTPClient()
        req = http.GET("http://%s:%d/49872398432" % (BASEHOST, BASEPORT))
        self.assertTrue(req != None, "None Returned!")
        self.assertTrue(req.code == 404)

    def test404POST(self):
        '''Test against 404 errors'''
        print("Testing test404POST!")
        MyHTTPHandler.post = nothing_available
        http = httpclass.HTTPClient()
        req = http.POST("http://%s:%d/49872398432" % (BASEHOST, BASEPORT))
        self.assertTrue(req != None, "None Returned!")
        self.assertTrue(req.code == 404)

    def testGET(self):
        '''Test HTTP GET'''
        print("Testing testGET!")
        MyHTTPHandler.get = echo_path_get
        http = httpclass.HTTPClient()
        path = "abcdef/gjkd/dsadas"
        url = "http://%s:%d/%s" % (BASEHOST, BASEPORT, path)
        req = http.GET(url)
        self.assertTrue(req != None, "None Returned!")
        self.assertTrue(req.code == 200)
        self.assertTrue(req.body.find(path) >= 0, "Data: [%s] " % req.body)

    def testGETHeaders(self):
        '''Test HTTP GET Headers'''
        print("Testing testGETHeaders!")
        MyHTTPHandler.get = header_check
        MyHTTPHandler.post = die_on_method
        http = httpclass.HTTPClient()
        path = "abcdef/gjkd/dsadas"
        url = "http://%s:%d/%s" % (BASEHOST, BASEPORT, path)
        req = http.GET(url)
        self.assertTrue(req != None, "None Returned!")
        self.assertTrue(req.code == 200)

    def testPOSTHeaders(self):
        '''Test HTTP POST Headers'''
        print("Testing testPOSTHeaders!")
        MyHTTPHandler.post = post_header_check
        MyHTTPHandler.get = die_on_method
        http = httpclass.HTTPClient()
        path = "abcdef/gjkd/dsadas"
        url = "http://%s:%d/%s" % (BASEHOST, BASEPORT, path)
        req = http.POST(url)
        self.assertTrue(req != None, "None Returned!")
        self.assertTrue(req.code == 200,
                        "Code is %s but I wanted a 200 OK" % req.code)

    # consider disabling this test until everything else works
    def testInternetGets(self):
        '''Test HTTP Get in the wild, these webservers are far less forgiving'''
        print("Testing testInternetGets!")
        MyHTTPHandler.get = echo_path_get
        http = httpclass.HTTPClient()
        urls = [
            "http://www.cs.ualberta.ca/",
            "http://softwareprocess.es/static/SoftwareProcess.es.html",
            "http://c2.com/cgi/wiki?CommonLispHyperSpec",
            "http://slashdot.org"
        ]
        for url in urls:
            try:
                req = http.GET(url)
            except Exception as e:
                print("An Exception was thrown for %s" % url)
                self.assertTrue(False,
                                "An Exception was thrown for %s %s" % (url, e))
            self.assertTrue(req != None, "None Returned! %s" % url)
            self.assertTrue(req.code == 200 or req.code == 301 or
                            req.code == 302,
                            "Code: %s for %s" % (req.code, url))
            if (req.code == 200):
                self.assertTrue(req.body.find("DOCTYPE") >= 0 or
                                req.body.find("<body") >= 0,
                                "%s Data: [%s] " % (url, req.body))

    def testPOST(self):
        '''Test HTTP POST with an echo server'''
        print("Testing testPOST!")
        MyHTTPHandler.post = echo_post
        http = httpclass.HTTPClient()
        path = "post_echoer"
        url = "http://%s:%d/%s" % (BASEHOST, BASEPORT, path)
        args = {'a': 'aaaaaaaaaaaaa',
                'b': 'bbbbbbbbbbbbbbbbbbbbbb',
                'c': 'c',
                'd': '012345\r67890\n2321321\n\r'}
        print("Sending POST!")
        req = http.POST(url, args=args)
        self.assertTrue(req != None, "None Returned!")
        self.assertTrue(req.code == 200)
        print("Test Post Body: [%s]" % req.body)
        outargs = json.loads(req.body)
        print(outargs.__class__)
        for key in args:
            self.assertTrue(args[key] == outargs[key][0],
                            "Key [%s] not found" % key)
        for key in outargs:
            self.assertTrue(args[key] == outargs[key][0],
                            "Key [%s] not found" % key)

    @classmethod
    def tearDownClass(self):
        if (TestHTTPClient.httpd != None):
            print("HTTP Shutdown in tearDown\n")
            TestHTTPClient.httpd.shutdown()
            TestHTTPClient.httpd.server_close()
            time.sleep(1)


def test_test_webserver():
    print("http://%s:%d/dsadsadsadsa\n" % (BASEHOST, BASEPORT))
    MyHTTPHandler.get = echo_path_get
    MyHTTPHandler.post = echo_post
    httpd = make_http_server()
    try:
        httpd.serve_forever()
    finally:
        httpd.shutdown()


if __name__ == '__main__':
    unittest.main()
reactor.py
import asyncore import errno import io import logging import os import select import socket import ssl import sys import threading import time from collections import deque from functools import total_ordering from heapq import heappush, heappop from threading import get_ident from hazelcast.config import SSLProtocol, _Config from hazelcast.connection import Connection from hazelcast.core import Address from hazelcast.errors import HazelcastError from hazelcast.future import Future try: import fcntl except ImportError: fcntl = None _logger = logging.getLogger(__name__) # We should retry receiving/sending the message in case of these errors # EAGAIN: Resource temporarily unavailable # EWOULDBLOCK: The read/write would block # EDEADLK: Was added before, retrying it just to make sure that # client behaves the same on some edge cases. # SSL_ERROR_WANT_READ/WRITE: The socket could not satisfy the # needs of the SSL_read/write. During the negotiation process # SSL_read/write may also want to write/read data, hence may also # raise SSL_ERROR_WANT_WRITE/READ. _RETRYABLE_ERROR_CODES = ( errno.EAGAIN, errno.EWOULDBLOCK, errno.EDEADLK, ssl.SSL_ERROR_WANT_WRITE, ssl.SSL_ERROR_WANT_READ, ) def _set_nonblocking(fd): if not fcntl: return flags = fcntl.fcntl(fd, fcntl.F_GETFL) fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) class _SocketAdapter: def __init__(self, fd): self._fd = fd def fileno(self): return self._fd def close(self): os.close(self._fd) def getsockopt(self, level, optname, buflen=None): if level == socket.SOL_SOCKET and optname == socket.SO_ERROR and not buflen: return 0 raise NotImplementedError("Only asyncore specific behaviour is implemented.") class _AbstractWaker(asyncore.dispatcher): def __init__(self, map): asyncore.dispatcher.__init__(self, map=map) self.awake = False def writable(self): return False def wake(self): raise NotImplementedError("wake") class _PipedWaker(_AbstractWaker): def __init__(self, map): _AbstractWaker.__init__(self, map) self._read_fd, self._write_fd = os.pipe() self.set_socket(_SocketAdapter(self._read_fd)) _set_nonblocking(self._read_fd) _set_nonblocking(self._write_fd) def wake(self): if not self.awake: self.awake = True try: os.write(self._write_fd, b"x") except (IOError, ValueError): pass def handle_read(self): self.awake = False try: while len(os.read(self._read_fd, 4096)) == 4096: pass except (IOError, OSError): pass def close(self): _AbstractWaker.close(self) # Will close the reader os.close(self._write_fd) class _SocketedWaker(_AbstractWaker): def __init__(self, map): _AbstractWaker.__init__(self, map) self._writer = socket.socket() self._writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) a = socket.socket() a.bind(("127.0.0.1", 0)) a.listen(1) addr = a.getsockname() try: self._writer.connect(addr) self._reader, _ = a.accept() finally: a.close() self.set_socket(self._reader) self._writer.settimeout(0) self._reader.settimeout(0) def wake(self): if not self.awake: self.awake = True try: self._writer.send(b"x") except (IOError, socket.error, ValueError): pass def handle_read(self): self.awake = False try: while len(self._reader.recv(4096)) == 4096: pass except (IOError, socket.error): pass def close(self): _AbstractWaker.close(self) # Will close the reader self._writer.close() class _AbstractLoop: def __init__(self, map): self._map = map self._timers = [] # Accessed only from the reactor thread self._new_timers = deque() # Popped only from the reactor thread self._is_live = False self._thread = None self._ident = -1 def start(self): 
        self._is_live = True
        self._thread = threading.Thread(target=self._loop, name="hazelcast-reactor")
        self._thread.daemon = True
        self._thread.start()
        self._ident = self._thread.ident

    def _loop(self):
        _logger.debug("Starting Reactor Thread")
        Future._threading_locals.is_reactor_thread = True
        while self._is_live:
            try:
                self.run_loop()
                self._check_timers()
            except select.error:
                # TODO: parse error type to catch only error "9"
                _logger.warning("Connection closed by server")
                pass
            except:
                _logger.exception("Error in Reactor Thread")
                # TODO: shutdown client
                return
        _logger.debug("Reactor Thread exited")
        self._cleanup_all_timers()

    def add_timer(self, delay, callback):
        timer = Timer(delay + time.time(), callback)
        self._new_timers.append((timer.end, timer))
        return timer

    def _check_timers(self):
        timers = self._timers

        if self._new_timers:
            new_timers = self._new_timers
            while new_timers:
                # There is no need to check for exception here,
                # reactor thread is the only one popping from
                # the deque. So, if we pass the size check
                # above, there should be at least one element
                heappush(timers, new_timers.popleft())

        if timers:
            now = time.time()
            while timers:
                timer = timers[0][1]
                if timer.check_timer(now):
                    heappop(timers)
                else:
                    # Timer in the root of the min heap is not expired.
                    # Therefore, there should be no expired
                    # timers in the heap.
                    return

    def _cleanup_all_timers(self):
        timers = self._timers
        new_timers = self._new_timers

        while timers:
            _, timer = timers.pop()
            timer.timer_ended_cb()

        # Although it is not the case with the current code base,
        # the timers ended above may add new timers. So, the order
        # is important.
        while new_timers:
            _, timer = new_timers.popleft()
            timer.timer_ended_cb()

    def check_loop(self):
        raise NotImplementedError("check_loop")

    def run_loop(self):
        raise NotImplementedError("run_loop")

    def wake_loop(self):
        raise NotImplementedError("wake_loop")

    def shutdown(self):
        raise NotImplementedError("shutdown")


class _WakeableLoop(_AbstractLoop):
    _waker_class = _PipedWaker if os.name != "nt" else _SocketedWaker

    def __init__(self, map):
        _AbstractLoop.__init__(self, map)
        self.waker = self._waker_class(map)

    def check_loop(self):
        assert not self.waker.awake
        self.wake_loop()
        assert self.waker.awake
        self.run_loop()
        assert not self.waker.awake

    def run_loop(self):
        asyncore.loop(timeout=0.01, use_poll=True, map=self._map, count=1)

    def wake_loop(self):
        if self._ident != get_ident():
            self.waker.wake()

    def shutdown(self):
        if not self._is_live:
            return

        self._is_live = False

        if self._ident != get_ident():
            self._thread.join()

        for connection in list(self._map.values()):
            if connection is self.waker:
                continue

            try:
                connection.close(None, HazelcastError("Client is shutting down"))
            except OSError as err:
                # EBADF means the socket is already closed; the constant
                # lives in errno, not in the socket module.
                if err.args[0] == errno.EBADF:
                    pass
                else:
                    raise

        self.waker.close()
        self._map.clear()


class _BasicLoop(_AbstractLoop):
    def check_loop(self):
        pass

    def run_loop(self):
        asyncore.loop(timeout=0.001, use_poll=True, map=self._map, count=1)

    def wake_loop(self):
        pass

    def shutdown(self):
        if not self._is_live:
            return

        self._is_live = False

        if self._ident != get_ident():
            self._thread.join()

        for connection in list(self._map.values()):
            try:
                connection.close(None, HazelcastError("Client is shutting down"))
            except OSError as err:
                if err.args[0] == errno.EBADF:
                    pass
                else:
                    raise

        self._map.clear()


class AsyncoreReactor:
    def __init__(self):
        self.map = {}
        loop = None
        try:
            loop = _WakeableLoop(self.map)
            loop.check_loop()
        except:
            _logger.exception(
                "Failed to initialize the wakeable loop. "
                "Using the basic loop instead. 
" "When used in the blocking mode, client" "may have sub-optimal performance." ) if loop: loop.shutdown() loop = _BasicLoop(self.map) self._loop = loop self.bytes_sent = 0 self.bytes_received = 0 def start(self): self._loop.start() def add_timer(self, delay, callback): return self._loop.add_timer(delay, callback) def wake_loop(self): self._loop.wake_loop() def shutdown(self): self._loop.shutdown() def connection_factory( self, connection_manager, connection_id, address, network_config, message_callback ): return AsyncoreConnection( self, connection_manager, connection_id, address, network_config, message_callback ) _BUFFER_SIZE = 128000 class AsyncoreConnection(Connection, asyncore.dispatcher): sent_protocol_bytes = False receive_buffer_size = _BUFFER_SIZE send_buffer_size = _BUFFER_SIZE _close_timer = None def __init__( self, reactor, connection_manager, connection_id, address, config, message_callback ): asyncore.dispatcher.__init__(self, map=reactor.map) Connection.__init__(self, connection_manager, connection_id, message_callback) self._reactor = reactor self.connected_address = address self._write_queue = deque() self._write_buf = io.BytesIO() self.create_socket(socket.AF_INET, socket.SOCK_STREAM) # set the socket timeout to 0 explicitly self.socket.settimeout(0) self._set_socket_options(config) if config.ssl_enabled: self._wrap_as_ssl_socket(config, address.host) try: self.connect((address.host, address.port)) except socket.error as e: # If the connection attempt failed # immediately, remove the connection from # the dispatchers map and clean resources. self._inner_close() raise e timeout = config.connection_timeout if timeout > 0: self._close_timer = reactor.add_timer(timeout, self._close_timer_cb) self.local_address = Address(*self.socket.getsockname()) self._write_queue.append(b"CP2") def handle_connect(self): if self._close_timer: self._close_timer.cancel() self.start_time = time.time() _logger.debug("Connected to %s", self.connected_address) def handle_read(self): reader = self._reader receive_buffer_size = self.receive_buffer_size try: while True: data = self.recv(receive_buffer_size) bytes_received = len(data) self._reactor.bytes_received += bytes_received reader.read(data) self.last_read_time = time.time() if bytes_received < receive_buffer_size: break except socket.error as err: if err.args[0] not in _RETRYABLE_ERROR_CODES: # Other error codes are fatal, should close the connection self.close(None, err) if reader.length: reader.process() def handle_write(self): write_queue = self._write_queue send_buffer_size = self.send_buffer_size write_batch = [] total_length = 0 while write_queue: message_bytes = write_queue.popleft() write_batch.append(message_bytes) total_length += len(message_bytes) if total_length >= send_buffer_size: break # We enter this only if len(write_queue) > 0. # So, len(write_batch) cannot be 0. if len(write_batch) == 1: bytes_ = write_batch[0] else: buf = self._write_buf buf.seek(0) for message_bytes in write_batch: buf.write(message_bytes) bytes_ = buf.getvalue() buf.truncate(0) try: bytes_sent = self.send(bytes_) except socket.error as err: if err.args[0] in _RETRYABLE_ERROR_CODES: # Couldn't write the bytes but we should # retry it. 
self._write_queue.appendleft(bytes_) else: # Other error codes are fatal, should close the connection self.close(None, err) else: # No exception is thrown during the send self.last_write_time = time.time() self.sent_protocol_bytes = True self._reactor.bytes_sent += bytes_sent if bytes_sent < len(bytes_): write_queue.appendleft(bytes_[bytes_sent:]) def handle_close(self): _logger.warning("Connection closed by server") self.close(None, IOError("Connection closed by server")) def handle_error(self): # We handle retryable error codes inside the # handle_read/write. Anything else should be fatal. error = sys.exc_info()[1] _logger.debug("Received error", exc_info=True) self.close(str(error), None) def readable(self): return self.live and self.sent_protocol_bytes def _write(self, buf): self._write_queue.append(buf) self._reactor.wake_loop() def writable(self): return len(self._write_queue) > 0 def _inner_close(self): if self._close_timer: # It might be the case that connection # is closed before the timer. If we are # closing via the timer, this call has # no effects. self._close_timer.cancel() asyncore.dispatcher.close(self) self._write_buf.close() def _close_timer_cb(self): if not self.connected: self.close(None, IOError("Connection timed out")) def _set_socket_options(self, config): # set tcp no delay self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) # set socket buffer self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, _BUFFER_SIZE) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, _BUFFER_SIZE) for level, option_name, value in config.socket_options: if option_name is socket.SO_RCVBUF: self.receive_buffer_size = value elif option_name is socket.SO_SNDBUF: self.send_buffer_size = value self.socket.setsockopt(level, option_name, value) def _wrap_as_ssl_socket(self, config: _Config, hostname: str): ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) protocol = config.ssl_protocol # Use only the configured protocol try: if protocol != SSLProtocol.SSLv2: ssl_context.options |= ssl.OP_NO_SSLv2 if protocol != SSLProtocol.SSLv3: ssl_context.options |= ssl.OP_NO_SSLv3 if protocol != SSLProtocol.TLSv1: ssl_context.options |= ssl.OP_NO_TLSv1 if protocol != SSLProtocol.TLSv1_1: ssl_context.options |= ssl.OP_NO_TLSv1_1 if protocol != SSLProtocol.TLSv1_2: ssl_context.options |= ssl.OP_NO_TLSv1_2 if protocol != SSLProtocol.TLSv1_3: ssl_context.options |= ssl.OP_NO_TLSv1_3 except AttributeError: pass ssl_context.verify_mode = ssl.CERT_REQUIRED if config.ssl_cafile: ssl_context.load_verify_locations(config.ssl_cafile) else: ssl_context.load_default_certs() if config.ssl_certfile: ssl_context.load_cert_chain( config.ssl_certfile, config.ssl_keyfile, config.ssl_password ) if config.ssl_ciphers: ssl_context.set_ciphers(config.ssl_ciphers) server_hostname = None if config.ssl_check_hostname: ssl_context.check_hostname = True server_hostname = hostname self.socket = ssl_context.wrap_socket(self.socket, server_hostname=server_hostname) def __repr__(self): return "Connection(id=%s, live=%s, remote_address=%s)" % ( self._id, self.live, self.remote_address, ) def __str__(self): return self.__repr__() @total_ordering class Timer: __slots__ = ("end", "timer_ended_cb", "canceled") def __init__(self, end, timer_ended_cb): self.end = end self.timer_ended_cb = timer_ended_cb self.canceled = False def __eq__(self, other): return self.end == other.end def __ne__(self, other): return self.end != other.end def __lt__(self, other): return self.end < other.end def cancel(self): self.canceled = 
True def check_timer(self, now): if self.canceled: return True if now >= self.end: self.timer_ended_cb() return True return False
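One detail of reactor.py worth isolating is the timer bookkeeping in _AbstractLoop: any thread may hand a timer over through a deque, but only the reactor thread drains that deque into the min-heap and pops expired roots, which is why no lock is needed. A standalone sketch of the same single-consumer pattern, with illustrative names (TimerStoreSketch is not part of the client):

import itertools
import time
from collections import deque
from heapq import heappush, heappop


class TimerStoreSketch:
    """Single-consumer timer store mirroring _AbstractLoop's bookkeeping."""

    def __init__(self):
        self._timers = []              # min-heap, touched only by the consumer
        self._new_timers = deque()     # thread-safe handoff from producer threads
        self._seq = itertools.count()  # tie-breaker so callbacks never get compared

    def add_timer(self, delay, callback):
        # Safe from any thread: deque.append is atomic in CPython.
        self._new_timers.append((time.time() + delay, next(self._seq), callback))

    def check_timers(self):
        # Consumer thread only: drain the handoff, then fire expired roots.
        while self._new_timers:
            heappush(self._timers, self._new_timers.popleft())
        now = time.time()
        while self._timers and self._timers[0][0] <= now:
            _, _, callback = heappop(self._timers)
            callback()


store = TimerStoreSketch()
store.add_timer(0.0, lambda: print("expired"))
store.check_timers()  # prints "expired"

The (deadline, sequence, callback) tuples play the role that Timer's total_ordering plays in the real code: heap comparisons resolve on the first two fields and never fall through to comparing callbacks.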
test_avi_api.py
import json
import logging
import traceback
import unittest

import pytest
from avi.sdk.avi_api import (ApiSession, ObjectNotFound, APIError, ApiResponse,
                             avi_timedelta, sessionDict)
from avi.sdk.utils.api_utils import ApiUtils
from avi.sdk.samples.common import get_sample_ssl_params
from requests.packages import urllib3
from requests import Response
from multiprocessing import Pool, Process
import os
import vcr
import copy
from datetime import timedelta
from parameterized import parameterized

gSAMPLE_CONFIG = None
api = None
log = logging.getLogger(__name__)
login_info = None
urllib3.disable_warnings()
gapi_version = '17.2.6'

config_file = pytest.config.getoption("--config")
with open(config_file) as f:
    cfg = json.load(f)

my_vcr = vcr.VCR(
    cassette_library_dir='python/avi/sdk/test/fixtures/cassettes/',
    serializer='json',
    match_on=['method', 'url']
)


@my_vcr.use_cassette()
def setUpModule():
    global gSAMPLE_CONFIG
    gSAMPLE_CONFIG = cfg
    log.debug('read config %s', gSAMPLE_CONFIG)
    global login_info
    login_info = gSAMPLE_CONFIG["LoginInfo"]
    global api
    api = ApiSession.get_session(
        login_info["controller_ip"],
        login_info.get("username", "admin"),
        login_info.get("password", "fr3sca$%^"),
        tenant=login_info.get("tenant", "admin"),
        tenant_uuid=login_info.get("tenant_uuid", None),
        api_version=login_info.get("api_version", gapi_version),
        verify=False)


def create_sessions(args):
    login_info, num_sessions = args
    log.info('pid %d num_sessions %d', os.getpid(), num_sessions)
    user = login_info.get("username", "admin")
    cip = login_info.get("controller_ip")
    port = login_info.get("port")
    k_port = port if port else 443
    key = cip + ":" + user + ":" + str(k_port)
    for _ in range(num_sessions):
        api = ApiSession(
            login_info["controller_ip"],
            login_info.get("username", "admin"),
            login_info.get("password", "fr3sca$%^"),
            api_version=login_info.get("api_version", "17.1"),
            data_log=login_info['data_log'])
    return 1 if key in sessionDict else 0


def shared_session_check(index):
    rsp = api.get('tenant')
    return rsp.status_code


class Test(unittest.TestCase):

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_4_0
    def test_basic_vs(self):
        basic_vs_cfg = gSAMPLE_CONFIG["BasicVS"]
        vs_obj = basic_vs_cfg["vs_obj"]
        resp = api.post('pool', data=json.dumps(basic_vs_cfg["pool_obj"]),
                        api_version='17.1.1')
        assert resp.status_code in (200, 201)
        vs_obj["pool_ref"] = api.get_obj_ref(resp.json())
        resp = api.post('virtualservice', data=json.dumps(vs_obj),
                        api_version='17.1.1')
        assert resp.status_code in (200, 201)
        pool_name = gSAMPLE_CONFIG["BasicVS"]["pool_obj"]["name"]
        resp = api.get('virtualservice', tenant='admin', api_version='17.1.1')
        assert resp.json()['count'] >= 1
        resp = api.delete_by_name('virtualservice', vs_obj['name'],
                                  api_version='17.1.1')
        assert resp.status_code in (200, 204)
        resp = api.delete_by_name("pool", pool_name, api_version='17.1.1')
        assert resp.status_code in (200, 204)

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_22_0
    def test_reuse_api_session(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        assert api1 == api2

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_17_0
    def test_lazy_authentication(self):
        ApiSession.clear_cached_sessions()
        session = ApiSession(
            controller_ip=login_info["controller_ip"],
            username=login_info.get("username", "admin"),
            password=login_info.get("password", "fr3sca$%^"),
            lazy_authentication=True)
        assert not session.keystone_token
        session.get('pool')
        assert session.keystone_token
        ApiSession.clear_cached_sessions()
        session = ApiSession(
            controller_ip=login_info["controller_ip"],
            username=login_info.get("username", "admin"),
            password=login_info.get("password", "fr3sca$%^"),
            lazy_authentication=False)
        assert session.keystone_token

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_34_0
    def test_ssl_vs(self):
        papi = ApiSession(api.avi_credentials.controller,
                          api.avi_credentials.username,
                          api.avi_credentials.password,
                          api_version=api.avi_credentials.api_version,
                          verify=False, data_log=True)
        ssl_vs_cfg = gSAMPLE_CONFIG["SSL-VS"]
        vs_obj = ssl_vs_cfg["vs_obj"]
        pool_name = gSAMPLE_CONFIG["SSL-VS"]["pool_obj"]["name"]
        resp = papi.post('pool', data=gSAMPLE_CONFIG["SSL-VS"]["pool_obj"])
        assert resp.status_code == 201
        pool_ref = papi.get_obj_ref(resp.json())
        cert, key, _, _ = get_sample_ssl_params(
            folder_path=os.path.abspath(
                os.path.join(os.path.dirname(__file__), '..', 'samples')) + os.sep)
        api_utils = ApiUtils(papi)
        try:
            resp = api_utils.import_ssl_certificate("ssl-vs-kc", key, cert)
            ssl_kc = resp.json()
        except Exception:
            # the certificate may already exist from a previous run
            ssl_kc = api.get_object_by_name('sslkeyandcertificate', 'ssl-vs-kc')
        ssl_key_and_cert_ref = [papi.get_obj_ref(ssl_kc)]
        vs_obj["pool_ref"] = pool_ref
        vs_obj["ssl_key_and_certificate_refs"] = ssl_key_and_cert_ref
        resp = papi.post('virtualservice', data=json.dumps(vs_obj))
        assert resp.status_code < 300
        resp = papi.delete_by_name('virtualservice', vs_obj['name'])
        assert resp.status_code in (200, 204)
        resp = papi.delete_by_name("pool", pool_name)
        assert resp.status_code in (200, 204)
        resp = api.delete_by_name('sslkeyandcertificate', 'ssl-vs-kc')
        assert resp.status_code in (200, 204)

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_6_0
    def test_cloned_session_headers(self):
        api2 = ApiSession(controller_ip=api.avi_credentials.controller,
                          username=api.avi_credentials.username,
                          password=api.avi_credentials.password,
                          tenant=api.avi_credentials.tenant,
                          tenant_uuid=api.avi_credentials.tenant_uuid,
                          api_version=api.avi_credentials.api_version,
                          verify=False, data_log=api.data_log)
        SHARED_USER_HDRS = ['X-CSRFToken', 'Session-Id', 'Referer']
        for hdr in SHARED_USER_HDRS:
            if hdr in api.headers:
                assert api.headers[hdr] == api2.headers[hdr]

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_2_0
    def test_avi_json(self):
        rsp = Response()
        rsp.status_code = 404
        rsp._content = 'Not found'
        try:
            avi_rsp = ApiResponse(rsp)
            avi_rsp.json()
            assert False
        except ObjectNotFound:
            pass
        except Exception:
            assert False
        rsp.status_code = 501
        try:
            avi_rsp = ApiResponse(rsp)
            avi_rsp.json()
            assert False
        except APIError:
            pass
        except Exception:
            assert False
        rsp.status_code = 200
        rsp._content = json.dumps({'count': 3, 'results': ['a', 'b', 'c']})
        try:
            avi_rsp = ApiResponse(rsp)
            obj = avi_rsp.json()
            assert obj['count']
            assert avi_rsp.count() == 3
            assert len(obj['results']) == 3
        except Exception as e:
            log.debug('exception %s', str(e))
            log.debug('%s', traceback.format_exc())
            assert False

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_18_0
    def test_multiple_tenants(self):
        """
        Tests api with multiple tenants to make sure object is only returned
        for the right tenant.
        """
        tobj = {'name': 'test-tenant'}
        resp = api.post('tenant', data=tobj)
        assert resp.status_code in (200, 201)
        tapi = ApiSession(controller_ip=api.avi_credentials.controller,
                          username=api.avi_credentials.username,
                          password=api.avi_credentials.password,
                          tenant=tobj['name'], verify=False,
                          data_log=api.data_log)
        t_obj = tapi.get_object_by_name('tenant', tobj['name'])
        # created pool.
        log.info('tenant %s', t_obj)
        basic_vs_cfg = gSAMPLE_CONFIG["BasicVS"]
        pool_cfg = copy.deepcopy(basic_vs_cfg["pool_obj"])
        pool_cfg['name'] = pool_cfg['name'] + '-test-tenant'
        resp = tapi.post('pool', data=pool_cfg)
        assert resp.status_code in (200, 201)
        # check pool was not created in tenant admin
        pname = pool_cfg['name']
        resp = api.get_object_by_name('pool', pname)
        assert resp is None
        resp = tapi.get_object_by_name('pool', pname)
        assert resp
        resp = api.get_object_by_name('pool', pname, tenant_uuid=t_obj['uuid'])
        assert resp
        resp = api.get_object_by_name('pool', pname, tenant='test-tenant')
        assert resp
        resp = tapi.delete_by_name("pool", pname)
        assert resp.status_code in (200, 204)
        resp = api.get_object_by_name('pool', pname, tenant='test-tenant')
        assert resp is None
        resp = tapi.delete_by_name('tenant', 'test-tenant', tenant='admin')
        assert resp.status_code in (200, 204)

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_36_0
    def test_timeout(self):
        resp = api.get_object_by_name('tenant', 'admin', timeout=2)
        assert resp

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_8_0
    def test_force_uuid(self):
        basic_vs_cfg = gSAMPLE_CONFIG["BasicVS"]
        pool_cfg = copy.deepcopy(basic_vs_cfg["pool_obj"])
        pool_cfg['name'] = pool_cfg['name'] + '-force'
        resp = api.post('pool', data=pool_cfg, force_uuid='pool-force-42')
        assert resp.status_code in (200, 201)
        pool_obj = resp.json()
        assert pool_obj['uuid'] == 'pool-force-42'
        pool_obj = api.get_object_by_name('pool', pool_cfg['name'])
        assert pool_obj['uuid'] == 'pool-force-42'
        resp = api.delete_by_name("pool", pool_cfg['name'])
        assert resp.status_code in (200, 204)

    @pytest.mark.skip_travis
    @pytest.mark.TCID1_48_1547_19_0
    def test_multiprocess_cache(self):
        p = Pool(4)
        num_sessions_list = [1, 4, 3, 2, 1]
        p_args = []
        for num_ssn in num_sessions_list:
            t = (login_info, num_ssn)
            p_args.append(t)
        results = p.map(create_sessions, p_args)
        for result in results:
            assert result == 1

    @pytest.mark.skip_travis
    @pytest.mark.TCID1_48_1547_20_0
    def test_multiprocess_sharing(self):
        api.get_object_by_name('tenant', name='admin')
        p = Process(target=shared_session_check, args=(1,))
        p.start()
        p.join()
        p = Pool(16)
        shared_sessions = []
        for index in range(16):
            shared_sessions.append(index)
        results = p.map(shared_session_check, shared_sessions)
        print("results:", results)
        for result in results:
            assert result == 200

    @pytest.mark.travis
    @pytest.mark.TCID1_48_1547_5_0
    def test_cleanup_sessions(self):
        api._update_session_last_used()
        assert api.key in sessionDict
        assert 'api' in sessionDict[api.key]
        assert 'last_used' in sessionDict[api.key]

    @pytest.mark.travis
    @pytest.mark.TCID1_48_1547_3_0
    def test_avi_timedelta(self):
        # a plain int is invalid input and must raise;
        # only a datetime.timedelta is accepted
        with pytest.raises(Exception):
            avi_timedelta(10)
        assert avi_timedelta(timedelta(seconds=10)) == 10

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_25_0
    def test_session_reset(self):
        papi = ApiSession(controller_ip=api.avi_credentials.controller,
                          username=api.avi_credentials.username,
                          password=api.avi_credentials.password,
                          verify=False,
                          api_version=api.avi_credentials.api_version,
                          data_log=api.data_log)
        res = papi.get('pool', params={'fields':
'name'}) assert res.status_code == 200 papi.reset_session() res = papi.get('pool', params={'fields': 'name'}) assert res.status_code == 200 data = {'name': 'test-reset'} res = papi.post('pool', data=data) assert res.status_code == 201 papi.reset_session() res = papi.delete_by_name('pool', 'test-reset') assert res.status_code == 204 @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_24_0 def test_session_multi_reset(self): papi = ApiSession(controller_ip=api.avi_credentials.controller, username=api.avi_credentials.username, password=api.avi_credentials.password, verify=False, api_version=api.avi_credentials.api_version, data_log=api.data_log) papi.reset_session() papi.reset_session() @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_21_0 def test_retry_unauth_api(self): papi = ApiSession(controller_ip=api.avi_credentials.controller, username=api.avi_credentials.username, password=api.avi_credentials.password, verify=False, api_version=api.avi_credentials.api_version, data_log=api.data_log) papi.keystone_token = 'invalid' resp = papi.post('tenant', data={'name': 'tenant1', 'local': 'true'}) assert resp.status_code == 201 papi.delete_by_name('tenant', 'tenant1') # Added test cases for getter and setter methods in avi_api @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_11_0 def test_get_controller_ip(self): api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False) api2 = ApiSession.get_session(avi_credentials=api.avi_credentials, verify=False) assert api1.controller_ip == api2.controller_ip @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_27_0 def test_set_controller_ip(self): api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False) api2 = ApiSession.get_session(avi_credentials=api.avi_credentials, verify=False) api1.controller_ip = '10.10.2.3' assert api1.controller_ip == api2.controller_ip api1.controller_ip = login_info['controller_ip'] @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_16_0 def test_get_username(self): api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False) api2 = ApiSession.get_session(avi_credentials=api.avi_credentials, verify=False) assert api1.username == api2.username @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_33_0 def test_set_username(self): api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False) api2 = ApiSession.get_session(avi_credentials=api.avi_credentials, verify=False) api1.username = 'avi-networks' assert api1.username == api2.username api1.username = login_info.get("username", "admin") @pytest.mark.skip_travis @pytest.mark.TCID1_48_1547_13_0 def test_get_password(self): api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False) api2 = ApiSession.get_session(avi_credentials=api.avi_credentials, verify=False) assert api1.password == api2.password @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_29_0 def test_set_password(self): api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False) api2 = ApiSession.get_session(avi_credentials=api.avi_credentials, verify=False) api1.password = 'admin@#$' assert api1.password == api2.password api1.password = login_info.get("password", "fr3sca$%^") @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_12_0 def test_get_key_token(self): api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False) api2 = ApiSession.get_session(avi_credentials=api.avi_credentials, verify=False) assert 
api1.keystone_token == api2.keystone_token

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_28_0
    def test_set_key_token(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        token = api1.keystone_token
        api1.keystone_token = "abc1werxSWASC"
        assert api1.keystone_token == api2.keystone_token
        api1.keystone_token = token

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_15_0
    def test_get_tenant_uuid(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        assert api1.tenant_uuid == api2.tenant_uuid

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_32_0
    def test_set_tenant_uuid(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        api1.tenant_uuid = "Xyssdd123YYY-dummy"
        assert api1.tenant_uuid == api2.tenant_uuid
        api1.tenant_uuid = login_info.get("tenant_uuid", None)

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_35_0
    def test_tenant(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        assert api1.tenant == api2.tenant

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_31_0
    def test_set_tenant(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        api1.tenant = 'vmware'
        assert api1.tenant == api2.tenant
        api1.tenant = login_info.get("tenant", "admin")

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_14_0
    def test_get_port(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        assert api1.port == api2.port

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_30_0
    def test_set_port(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        api1.port = '9993'
        assert api1.port == api2.port
        api1.port = login_info.get("port")

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_9_0
    def test_get_api_version(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        assert api1.api_version == api2.api_version

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_26_0
    def test_set_api_version(self):
        api1 = ApiSession(avi_credentials=api.avi_credentials, verify=False)
        api2 = ApiSession.get_session(avi_credentials=api.avi_credentials,
                                      verify=False)
        api1.api_version = "17.2.2"
        assert api1.api_version == api2.api_version
        api1.api_version = login_info.get("api_version", gapi_version)

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_10_0
    def test_get_controller_details(self):
        controller_details = api.get_controller_details()
        assert controller_details['controller_ip'] == api.controller_ip
        assert controller_details[
            'controller_api_version'] == api.remote_api_version

    @pytest.mark.travis
    @my_vcr.use_cassette()
    @pytest.mark.TCID1_48_1547_23_0
    def test_session_connected(self):
        ApiSession.clear_cached_sessions()
        session = 
ApiSession( controller_ip=login_info["controller_ip"], username=login_info.get("username", "admin"), password=login_info.get("password", "fr3sca$%^"), lazy_authentication=True) assert not session.connected session.get('pool') assert session.connected ApiSession.clear_cached_sessions() session = ApiSession( controller_ip=login_info["controller_ip"], username=login_info.get("username", "admin"), password=login_info.get("password", "fr3sca$%^"), lazy_authentication=False) assert session.connected @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_37_0 def test_user_login(self): api1 = ApiSession(controller_ip=login_info.get('controller_ip'), username=login_info.get('username'), password=login_info.get('password'), lazy_authentication=False) user_info = gSAMPLE_CONFIG["Passwords"] original_password = login_info.get('password') new_password = "admin123@!@#" user_info['password'] = new_password user_info['old_password'] = original_password res = api1.put('useraccount', data=json.dumps(user_info)) assert res.status_code == 200 api1.clear_cached_sessions() api2 = ApiSession(controller_ip=login_info.get('controller_ip'), username=login_info.get('username'), password=new_password, lazy_authentication=False) res = api2.get('pool') assert res.status_code in [200, 204] old_password = user_info['password'] changed_password = original_password user_info['password'] = original_password user_info['old_password'] = old_password result = api2.put('useraccount', user_info) assert result.status_code == 200 res = api2.get('pool') assert res.status_code in [200, 204] api2.clear_cached_sessions() api3 = ApiSession(controller_ip=login_info.get('controller_ip'), username=login_info.get('username'), password=changed_password, lazy_authentication=False) res = api3.get('pool') assert res.status_code in [200, 204] @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_7_0 def test_context_sharing(self): api1 = ApiSession(controller_ip=login_info.get('controller_ip'), username=login_info.get('username'), password=login_info.get('password'), lazy_authentication=False) context_api1 = api1.get_context() api1.clear_cached_sessions() api2 = ApiSession(controller_ip=login_info.get('controller_ip'), username=login_info.get('username'), password=login_info.get('password'), session_id= context_api1['session_id'], csrftoken=context_api1['csrftoken'], lazy_authentication=True) api2.get('pool') assert api2.get_context() == context_api1 @pytest.mark.travis @my_vcr.use_cassette() @pytest.mark.TCID1_48_1547_1_0 def test_api_session(self): api_session1 = ApiSession(controller_ip=login_info.get('controller_ip'), username=login_info.get('username'), password=login_info.get('password'), tenant=login_info.get("tenant", "admin"), tenant_uuid=login_info.get("tenant_uuid", None), api_version=login_info.get("api_version", gapi_version), verify=False) api_session2 = ApiSession.get_session( login_info["controller_ip"], login_info.get("username", "admin"), login_info.get("password", "fr3sca$%^"), tenant=login_info.get("tenant", "admin"), tenant_uuid=login_info.get("tenant_uuid", None), api_version=login_info.get("api_version", gapi_version), verify=False) assert api_session1.avi_credentials.session_id == api_session2.avi_credentials.session_id @parameterized.expand( [('https://192.10.100.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295', 'vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295'), ('https://192.10.100.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295#test-vrf', 
'vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295'), ('vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295', 'vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295') ]) @pytest.mark.travis def test_get_slug_from_uri(self, input, expected): assert api.get_slug_from_uri(input) == expected if __name__ == "__main__": unittest.main()
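A pattern running through create_sessions and the reuse tests above: ApiSession keeps a module-level sessionDict keyed by controller:user:port, so constructing a session with the same coordinates resolves to one shared entry. A reduced sketch of that keyed-cache idea, with hypothetical names (CachedSession and _session_cache are not the SDK's):

_session_cache = {}


class CachedSession:
    """Sketch: one shared session per (controller, user, port) triple."""

    def __init__(self, controller_ip, username, port=443):
        self.controller_ip = controller_ip
        self.username = username
        self.port = port

    @classmethod
    def get_session(cls, controller_ip, username, port=443):
        key = "%s:%s:%s" % (controller_ip, username, port)
        if key not in _session_cache:
            _session_cache[key] = cls(controller_ip, username, port)
        return _session_cache[key]


# Repeated lookups hand back the same object, which is what
# test_reuse_api_session asserts for the real ApiSession.
a = CachedSession.get_session("10.0.0.1", "admin")
b = CachedSession.get_session("10.0.0.1", "admin")
assert a is b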
client.py
from socket import AF_INET, socket, SOCK_STREAM
from threading import Thread
import tkinter


def receive():
    """Handles receiving of messages."""
    while True:
        try:
            msg = client_socket.recv(BUFSIZ).decode("utf8")
            if msg == "options":
                global pool
                global go
                pool = tkinter.Button(top, text="uberpool", command=ploptions)
                pool.pack()
                pool.place(x=30, y=150)
                go = tkinter.Button(top, text="ubergo", command=gooptions)
                go.pack()
                go.place(x=150, y=150)
            else:
                msg_list.insert(tkinter.END, msg)
        except OSError:  # Possibly client has left the chat.
            break


def send(event=None):  # event is passed by binders.
    """Handles sending of messages."""
    msg = my_msg.get()
    my_msg.set("")  # Clears input field.
    client_socket.send(bytes(msg, "utf8"))
    if msg == "{quit}":
        client_socket.close()
        top.quit()


def ploptions():
    print("inside pool fn")
    global pool
    global go
    client_socket.send(bytes("pool", "utf8"))
    pool.config(state=tkinter.DISABLED)
    go.config(state=tkinter.DISABLED)


def gooptions():
    print("inside go fn")
    global go
    global pool
    client_socket.send(bytes("go", "utf8"))
    pool.config(state=tkinter.DISABLED)
    go.config(state=tkinter.DISABLED)


def on_closing(event=None):
    """This function is to be called when the window is closed."""
    my_msg.set("{quit}")
    send()


top = tkinter.Tk()
top.title("book a cab")

messages_frame = tkinter.Frame(top)
my_msg = tkinter.StringVar()  # For the messages to be sent.
my_msg.set("Type your name here.")
scrollbar = tkinter.Scrollbar(messages_frame)  # To navigate through past messages.
# Following will contain the messages.
msg_list = tkinter.Listbox(messages_frame, height=25, width=70,
                           yscrollcommand=scrollbar.set)
scrollbar.pack(side=tkinter.RIGHT, fill=tkinter.Y)
msg_list.pack(side=tkinter.LEFT, fill=tkinter.BOTH)
msg_list.pack()
messages_frame.pack()

entry_field = tkinter.Entry(top, textvariable=my_msg)
entry_field.bind("<Return>", send)
entry_field.pack()
send_button = tkinter.Button(top, text="Send", command=send)
send_button.pack()

top.protocol("WM_DELETE_WINDOW", on_closing)

# ----Now comes the sockets part----
HOST = input('Enter host: ')
PORT = input('Enter port: ')
if not PORT:
    PORT = 33000
else:
    PORT = int(PORT)

BUFSIZ = 1024
ADDR = (HOST, PORT)

client_socket = socket(AF_INET, SOCK_STREAM)
client_socket.connect(ADDR)

receive_thread = Thread(target=receive)
receive_thread.start()
tkinter.mainloop()  # Starts GUI execution.
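client.py only behaves sensibly against a matching peer: it sends the user's name first, renders every line it receives, and shows the two buttons when the literal string "options" arrives. A minimal broadcast-server sketch compatible with that protocol follows; the port and the moment at which "options" is sent are assumptions inferred from the client, not taken from an actual server file:

from socket import AF_INET, socket, SOCK_STREAM
from threading import Thread

HOST = ""
PORT = 33000   # matches the client's default
BUFSIZ = 1024
clients = []


def broadcast(msg):
    """Send raw bytes to every connected client."""
    for sock in clients:
        sock.send(msg)


def handle_client(client):
    name = client.recv(BUFSIZ).decode("utf8")  # first message is the name
    client.send(bytes("options", "utf8"))      # tells the GUI to show the buttons
    while True:
        msg = client.recv(BUFSIZ)
        if msg == bytes("{quit}", "utf8"):
            client.close()
            clients.remove(client)
            break
        broadcast(bytes("%s: " % name, "utf8") + msg)


server = socket(AF_INET, SOCK_STREAM)
server.bind((HOST, PORT))
server.listen(5)
print("Waiting for connections on port %d..." % PORT)
while True:
    conn, addr = server.accept()
    clients.append(conn)
    Thread(target=handle_client, args=(conn,)).start()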
test_forward.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # pylint: disable=import-self, invalid-name, unused-argument """ Tensorflow testcases ==================== This article is a test script to test tensorflow operator with Relay. """ from __future__ import print_function import threading import numpy as np import pytest try: import tensorflow.compat.v1 as tf except ImportError: import tensorflow as tf from tensorflow.python.framework import constant_op from tensorflow.python.framework import graph_util from tensorflow.python.ops import nn_ops from tensorflow.python.ops import nn from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops import variables from tensorflow.python.ops import init_ops from tensorflow.python.framework import function from tensorflow.python.framework import ops from tensorflow.python.framework import dtypes from tensorflow.python.ops import gen_functional_ops from distutils.version import LooseVersion import tvm from tvm import te from tvm import relay import tvm.relay.testing.tf as tf_testing from tvm.runtime.vm import VirtualMachine from tvm.relay.frontend.tensorflow import from_tensorflow from packaging import version as package_version import tvm.testing ####################################################################### # Generic run functions for TVM & tensorflow # ------------------------------------------ def convert_to_list(x): if not isinstance(x, list): x = [x] return x tf_dtypes = { "float32": tf.float32, "float16": tf.float16, "float64": tf.float64, "int32": tf.int32, "uint8": tf.uint8, "int8": tf.int8, "int16": tf.int16, "uint16": tf.uint16, "int64": tf.int64, } def vmobj_to_list(o): if isinstance(o, tvm.nd.NDArray): return [o.numpy()] elif isinstance(o, tvm.runtime.container.ADT): result = [] for f in o: result.extend(vmobj_to_list(f)) return result elif isinstance(o, tvm.relay.backend.interpreter.ConstructorValue): if o.constructor.name_hint == "Cons": tl = vmobj_to_list(o.fields[1]) hd = vmobj_to_list(o.fields[0]) hd.extend(tl) return hd elif o.constructor.name_hint == "Nil": return [] elif "tensor_nil" in o.constructor.name_hint: return [0] elif "tensor" in o.constructor.name_hint: return [o.fields[0].numpy()] else: raise RuntimeError("Unknown object type: %s" % o.constructor.name_hint) else: raise RuntimeError("Unknown object type: %s" % type(o)) def run_tvm_graph( graph_def, input_data, input_node, num_output=1, target="llvm", out_names=None, opt_level=3, mode="graph_executor", cuda_layout="NCHW", layout=None, disabled_pass=None, ignore_in_shape=False, serialize=False, ): """Generic function to compile on relay and execute on tvm""" input_data = convert_to_list(input_data) input_node = convert_to_list(input_node) if target == 
"cuda": layout = cuda_layout target_host = None if ignore_in_shape: shape_dict = None else: shape_dict = { e: i.shape if hasattr(i, "shape") else () for e, i in zip(input_node, input_data) } mod, params = relay.frontend.from_tensorflow( graph_def, layout=layout, shape=shape_dict, outputs=out_names ) dev = tvm.device(target, 0) if mode == "debug": ex = relay.create_executor(mode, mod=mod, device=tvm.cpu(), target="llvm") inputs = [] for param in mod["main"].params: found = False for i, n in enumerate(input_node): if n == param.name_hint: found = True inputs.append(tvm.nd.array(input_data[i])) break # Interpreter doesn't bind constants, so still need to find in params if not found: inputs.append(tvm.nd.array(params[param.name_hint])) result = ex.evaluate()(*inputs) return vmobj_to_list(result) elif mode == "vm": with tvm.transform.PassContext(opt_level=opt_level, disabled_pass=disabled_pass): mod = relay.transform.InferType()(mod) vm_exec = relay.vm.compile(mod, target="llvm", params=params) if serialize: code, lib = vm_exec.save() vm_exec = tvm.runtime.vm.Executable.load_exec(code, lib) vm = VirtualMachine(vm_exec, tvm.cpu()) inputs = {} for e, i in zip(input_node, input_data): inputs[e] = tvm.nd.array(i) result = vm.invoke("main", **inputs) return vmobj_to_list(result) else: with tvm.transform.PassContext(opt_level=opt_level, disabled_pass=disabled_pass): target = tvm.target.Target(target, target_host) graph, lib, params = relay.build(mod, target=target, params=params) from tvm.contrib import graph_executor m = graph_executor.create(graph, lib, dev) # set inputs for e, i in zip(input_node, input_data): if e != "": m.set_input(e, tvm.nd.array(i)) m.set_input(**params) # execute m.run() # get outputs assert out_names is None or num_output == len( out_names ), "out_names: {} num_output: {}".format(out_names, num_output) tvm_output_list = [m.get_output(i).numpy() for i in range(num_output)] return tvm_output_list def run_tf_graph(sess, input_data, input_node, output_node): """Generic function to execute tensorflow""" input_data = convert_to_list(input_data) input_node = convert_to_list(input_node) output_node = convert_to_list(output_node) tensor = [sess.graph.get_tensor_by_name(output_name) for output_name in output_node] input_dict = {e: input_data[i] for i, e in enumerate(input_node)} if len(input_node) == 1 and input_node[0] == "": output_data = sess.run(tensor) else: output_data = sess.run(tensor, input_dict) return output_data def compare_tf_with_tvm( in_data, in_name, out_name, init_global_variables=False, no_gpu=False, opt_level=3, mode="graph_executor", cuda_layout="NCHW", add_shapes_to_graph_def=True, targets=None, ignore_in_shape=False, ): """Generic function to generate and compare tensorflow and TVM output""" def name_without_num(name): return name.split(":")[0] if ":" in name else name out_name = convert_to_list(out_name) out_node = [name_without_num(name) for name in out_name] in_data = convert_to_list(in_data) in_name = convert_to_list(in_name) in_node = [name_without_num(name) for name in in_name] with tf.Session() as sess: if init_global_variables: sess.run(variables.global_variables_initializer()) final_graph_def = ( tf_testing.AddShapesToGraphDef(sess, out_node) if add_shapes_to_graph_def else tf.get_default_graph().as_graph_def() ) tf_output = run_tf_graph(sess, in_data, in_name, out_name) devices = targets if targets else ["llvm", "cuda"] for device in devices: dev = tvm.device(device, 0) if not tvm.testing.device_enabled(device): print("Skip because %s is not enabled" % 
device) continue if no_gpu and device == "cuda": continue if "cublas" in device and not tvm.get_global_func("tvm.contrib.cublas.matmul", True): print("Skip because cublas is not enabled: %s" % device) continue tvm_output = run_tvm_graph( final_graph_def, in_data, in_node, target=device, out_names=out_name, num_output=len(out_name), opt_level=opt_level, mode=mode, cuda_layout=cuda_layout, ignore_in_shape=ignore_in_shape, ) # since the names from tensorflow and relay runs are not exactly same, # first len(tf_output) will be compared for i in range(len(tf_output)): if not isinstance(tf_output[i], np.ndarray): assert len(tvm_output[i].shape) == 0 tvm.testing.assert_allclose(tf_output[i], tvm_output[i], atol=1e-5, rtol=1e-5) sess.close() def is_gpu_available(): from tensorflow.python.client import device_lib local_device_protos = device_lib.list_local_devices() gpu_list = [x.name for x in local_device_protos if x.device_type == "GPU"] if len(gpu_list) > 0: print("Tensorflow GPU:", gpu_list) return True else: return False ####################################################################### # Pooling # ------- def _test_pooling_iteration(input_shape, **kwargs): """One iteration of pool operation with given shapes and attributes""" x = -np.arange(np.prod(input_shape), dtype=np.float32).reshape(input_shape) - 1 with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=input_shape, dtype="float32") nn_ops.pool(in_data, **kwargs) if kwargs["pooling_type"] == "MAX": out_name = "max_pool:0" else: out_name = "avg_pool:0" compare_tf_with_tvm(x, "Placeholder:0", out_name) def _test_pooling(input_shape, **kwargs): _test_pooling_iteration(input_shape, **kwargs) if is_gpu_available(): if len(input_shape) == 4: input_shape = [input_shape[ii] for ii in (0, 3, 1, 2)] kwargs["data_format"] = "NCHW" _test_pooling_iteration(input_shape, **kwargs) def _test_pooling_dynamic(input_shape, np_shape, **kwargs): """Pooling with dynamic height and width dimensions.""" x = -np.arange(np.prod(np_shape), dtype=np.float32).reshape(np_shape) - 1 with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=input_shape, dtype="float32") nn_ops.pool(in_data, **kwargs) if kwargs["pooling_type"] == "MAX": out_name = "max_pool:0" else: out_name = "avg_pool:0" compare_tf_with_tvm(x, "Placeholder:0", out_name, mode="vm", ignore_in_shape=True) @tvm.testing.uses_gpu def test_forward_pooling(): """Pooling""" # TensorFlow only supports NDHWC for max_pool3d on CPU for pool_type in ["AVG", "MAX"]: # NDHWC is the default layout for max_pool3d and avg_pool3d in TensorFlow _test_pooling( input_shape=[1, 3, 32, 32, 32], window_shape=[2, 2, 2], padding="VALID", pooling_type=pool_type, dilation_rate=[1, 1, 1], strides=[2, 2, 2], ) _test_pooling( input_shape=[1, 3, 32, 32, 32], window_shape=[1, 1, 1], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1, 1], strides=[1, 1, 1], ) _test_pooling( input_shape=[1, 3, 32, 32, 32], window_shape=[2, 2, 2], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1, 1], strides=[2, 2, 2], ) _test_pooling_dynamic( input_shape=[1, None, None, 3], np_shape=[1, 32, 32, 3], window_shape=[2, 2], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1], strides=[1, 1], ) # test cases for max_pool3d & avg_pool3d with layout NCDHW # TensorFlow pool3d doesn't support NCDHW on cpu if is_gpu_available(): _test_pooling( input_shape=[1, 3, 32, 32, 32], window_shape=[1, 1, 1], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1, 1], strides=[1, 1, 1], data_format="NCDHW", ) 
_test_pooling( input_shape=[1, 3, 32, 32, 32], window_shape=[2, 2, 2], padding="VALID", pooling_type=pool_type, dilation_rate=[1, 1, 1], strides=[2, 2, 2], data_format="NCDHW", ) _test_pooling( input_shape=[2, 9, 10, 2], window_shape=[1, 1], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1], strides=[1, 1], ) _test_pooling( input_shape=[2, 10, 9, 2], window_shape=[1, 1], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1], strides=[1, 1], ) _test_pooling( input_shape=[2, 9, 10, 2], window_shape=[2, 1], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1], strides=[1, 1], ) _test_pooling( input_shape=[2, 10, 9, 2], window_shape=[2, 3], padding="SAME", pooling_type=pool_type, dilation_rate=[1, 1], strides=[2, 1], ) # Tests involving SpaceToBatchND _test_pooling( input_shape=[1, 1, 2, 1], window_shape=[1, 1], padding="VALID", pooling_type=pool_type, dilation_rate=[1, 2], ) _test_pooling( input_shape=[1, 2, 1], window_shape=[1], padding="VALID", pooling_type=pool_type, dilation_rate=[2], ) # Explicit padding if package_version.parse(tf.VERSION) >= package_version.parse("2.4.1"): _test_pooling( input_shape=[2, 9, 10, 2], window_shape=[4, 4], padding=[[0, 0], [0, 1], [2, 3], [0, 0]], pooling_type="MAX", dilation_rate=[1, 1], strides=[1, 1], ) ####################################################################### # Convolution # ----------- def _test_convolution( opname, tensor_in_sizes, filter_in_sizes, dilations, strides, padding, data_format, deconv_output_shape=[], add_shapes_to_graph_def=True, ): """One iteration of convolution with given shapes and attributes""" total_size_1 = np.prod(tensor_in_sizes) total_size_2 = np.prod(filter_in_sizes) # Initializes the input tensor with array containing incrementing # numbers from 1. 
data_array = [f * 1.0 for f in range(1, total_size_1 + 1)] filter_array = [f * 1.0 for f in range(1, total_size_2 + 1)] with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=tensor_in_sizes, dtype="float32") in_filter = constant_op.constant(filter_array, shape=filter_in_sizes, dtype="float32") if data_format == "NHWC": strides = [1] + strides + [1] dilations = [1] + dilations + [1] else: strides = [1, 1] + strides dilations = [1, 1] + dilations if opname == "conv": nn_ops.conv2d( in_data, in_filter, strides=strides, dilations=dilations, padding=padding, data_format=data_format, ) compare_tf_with_tvm( np.reshape(data_array, tensor_in_sizes).astype("float32"), "Placeholder:0", "Conv2D:0", add_shapes_to_graph_def=add_shapes_to_graph_def, ) elif opname == "conv_transpose": nn_ops.conv2d_transpose( in_data, in_filter, output_shape=deconv_output_shape, strides=strides, padding=padding, data_format=data_format, ) compare_tf_with_tvm( np.reshape(data_array, tensor_in_sizes).astype("float32"), "Placeholder:0", "conv2d_transpose:0", add_shapes_to_graph_def=add_shapes_to_graph_def, ) else: nn_ops.depthwise_conv2d_native( in_data, in_filter, strides=strides, dilations=dilations, padding=padding, data_format=data_format, ) compare_tf_with_tvm( np.reshape(data_array, tensor_in_sizes).astype("float32"), "Placeholder:0", "DepthwiseConv2dNative:0", add_shapes_to_graph_def=add_shapes_to_graph_def, ) @tvm.testing.uses_gpu def test_forward_convolution(): if is_gpu_available(): _test_convolution("conv", [4, 176, 8, 8], [1, 1, 176, 32], [1, 1], [1, 1], "SAME", "NCHW") _test_convolution("conv", [4, 19, 17, 17], [3, 3, 19, 19], [1, 1], [2, 2], "VALID", "NCHW") _test_convolution("conv", [4, 124, 17, 17], [1, 1, 124, 19], [1, 1], [1, 1], "SAME", "NCHW") _test_convolution("conv", [4, 12, 17, 17], [3, 3, 12, 32], [1, 1], [2, 2], "VALID", "NCHW") _test_convolution( "depthwise", [4, 176, 8, 8], [1, 1, 176, 1], [1, 1], [1, 1], "SAME", "NCHW" ) _test_convolution( "depthwise", [4, 19, 17, 17], [3, 3, 19, 1], [1, 1], [2, 2], "VALID", "NCHW" ) _test_convolution( "depthwise", [4, 124, 17, 17], [1, 1, 124, 1], [1, 1], [1, 1], "SAME", "NCHW" ) _test_convolution( "depthwise", [4, 12, 17, 17], [3, 3, 12, 1], [1, 1], [2, 2], "VALID", "NCHW" ) _test_convolution( "depthwise", [4, 12, 17, 17], [3, 3, 12, 2], [1, 1], [2, 2], "VALID", "NCHW" ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [1, 1, 176, 32], [1, 1], [1, 1], "SAME", "NCHW", [4, 176, 8, 8], ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [2, 2, 176, 32], [1, 1], [1, 1], "SAME", "NCHW", [4, 176, 8, 8], ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [2, 2, 176, 32], [1, 1], [2, 2], "SAME", "NCHW", [4, 176, 15, 15], ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [3, 3, 176, 32], [1, 1], [1, 1], "SAME", "NCHW", [4, 176, 8, 8], ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [3, 3, 176, 32], [1, 1], [2, 2], "SAME", "NCHW", [4, 176, 15, 15], ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [3, 3, 176, 32], [1, 1], [2, 2], "SAME", "NCHW", [4, 176, 16, 16], ) _test_convolution( "conv_transpose", [4, 19, 8, 8], [3, 3, 19, 19], [1, 1], [2, 2], "VALID", "NCHW", [4, 19, 17, 17], ) _test_convolution( "conv_transpose", [4, 19, 17, 17], [1, 1, 124, 19], [1, 1], [1, 1], "SAME", "NCHW", [4, 124, 17, 17], ) _test_convolution( "conv_transpose", [4, 19, 17, 17], [3, 3, 124, 19], [1, 1], [1, 1], "SAME", "NCHW", [4, 124, 17, 17], ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [3, 3, 12, 32], [1, 1], [2, 2], "VALID", "NCHW", [4, 12, 17, 
17], ) # kernel 2x2, strides (2,2) _test_convolution( "conv_transpose", [4, 19, 8, 8], [2, 2, 19, 19], [1, 1], [2, 2], "VALID", "NCHW", [4, 19, 16, 16], ) _test_convolution( "conv_transpose", [4, 32, 8, 8], [2, 2, 12, 32], [1, 1], [2, 2], "VALID", "NCHW", [4, 12, 16, 16], ) # output channel is 1 _test_convolution( "conv_transpose", [1, 19, 8, 8], [1, 1, 1, 19], [1, 1], [1, 1], "VALID", "NCHW", [1, 1, 8, 8], ) _test_convolution("conv", [4, 8, 8, 176], [1, 1, 176, 32], [1, 1], [1, 1], "SAME", "NHWC") _test_convolution("conv", [4, 17, 17, 19], [3, 3, 19, 19], [1, 1], [2, 2], "VALID", "NHWC") _test_convolution("conv", [4, 17, 17, 124], [1, 1, 124, 19], [1, 1], [1, 1], "SAME", "NHWC") _test_convolution("conv", [4, 17, 17, 12], [3, 3, 12, 32], [1, 1], [2, 2], "VALID", "NHWC") _test_convolution( "conv", [4, 17, 17, 12], [3, 3, 12, 32], [1, 1], [2, 2], "VALID", "NHWC", add_shapes_to_graph_def=False, ) _test_convolution("depthwise", [4, 8, 8, 176], [1, 1, 176, 1], [1, 1], [1, 1], "SAME", "NHWC") _test_convolution("depthwise", [4, 17, 17, 19], [3, 3, 19, 1], [1, 1], [2, 2], "VALID", "NHWC") _test_convolution("depthwise", [4, 17, 17, 124], [1, 1, 124, 1], [1, 1], [1, 1], "SAME", "NHWC") _test_convolution("depthwise", [4, 17, 17, 12], [3, 3, 12, 1], [1, 1], [2, 2], "VALID", "NHWC") _test_convolution("depthwise", [4, 17, 17, 12], [3, 3, 12, 2], [1, 1], [2, 2], "VALID", "NHWC") _test_convolution( "depthwise", [4, 17, 17, 12], [3, 3, 12, 2], [1, 1], [2, 2], "VALID", "NHWC", add_shapes_to_graph_def=False, ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [1, 1, 176, 32], [1, 1], [1, 1], "SAME", "NHWC", [4, 8, 8, 176], ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [2, 2, 176, 32], [1, 1], [1, 1], "SAME", "NHWC", [4, 8, 8, 176], ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [2, 2, 176, 32], [1, 1], [2, 2], "SAME", "NHWC", [4, 15, 15, 176], ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [3, 3, 176, 32], [1, 1], [1, 1], "SAME", "NHWC", [4, 8, 8, 176], ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [3, 3, 176, 32], [1, 1], [2, 2], "SAME", "NHWC", [4, 15, 15, 176], ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [3, 3, 176, 32], [1, 1], [2, 2], "SAME", "NHWC", [4, 16, 16, 176], ) _test_convolution( "conv_transpose", [4, 8, 8, 19], [3, 3, 19, 19], [1, 1], [2, 2], "VALID", "NHWC", [4, 17, 17, 19], ) _test_convolution( "conv_transpose", [4, 17, 17, 19], [1, 1, 124, 19], [1, 1], [1, 1], "SAME", "NHWC", [4, 17, 17, 124], ) _test_convolution( "conv_transpose", [4, 17, 17, 19], [3, 3, 124, 19], [1, 1], [1, 1], "SAME", "NHWC", [4, 17, 17, 124], ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [3, 3, 12, 32], [1, 1], [2, 2], "VALID", "NHWC", [4, 17, 17, 12], ) # kernel 2x2, strides (2,2) _test_convolution( "conv_transpose", [4, 8, 8, 19], [2, 2, 19, 19], [1, 1], [2, 2], "VALID", "NHWC", [4, 16, 16, 19], ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [2, 2, 12, 32], [1, 1], [2, 2], "VALID", "NHWC", [4, 16, 16, 12], ) # output channel is 1 _test_convolution( "conv_transpose", [1, 8, 8, 19], [1, 1, 1, 19], [1, 1], [1, 1], "VALID", "NHWC", [1, 8, 8, 1], ) # Test without adding shapes to graph def _test_convolution( "conv_transpose", [4, 8, 8, 32], [1, 1, 176, 32], [1, 1], [1, 1], "SAME", "NHWC", [4, 8, 8, 176], add_shapes_to_graph_def=False, ) # Explicit padding if package_version.parse(tf.VERSION) >= package_version.parse("2.4.1"): _test_convolution( "conv", [4, 8, 8, 16], [1, 1, 16, 32], [1, 1], [1, 1], [[0, 0], [2, 3], [0, 1], [0, 0]], "NHWC", ) _test_convolution( 
"depthwise", [4, 8, 8, 16], [1, 1, 16, 1], [1, 1], [1, 1], [[0, 0], [2, 3], [0, 1], [0, 0]], "NHWC", ) _test_convolution( "conv_transpose", [4, 8, 8, 32], [3, 3, 176, 32], [1, 1], [2, 2], [[0, 0], [1, 0], [1, 0], [0, 0]], "NHWC", [4, 16, 16, 176], ) ####################################################################### # Convolution3D # ------------- def _test_convolution3d( opname, tensor_in_sizes, filter_in_sizes, dilations, strides, padding, data_format, deconv_output_shape=[], add_shapes_to_graph_def=True, ): """One iteration of 3D convolution with given shapes and attributes""" total_size_1 = np.prod(tensor_in_sizes) total_size_2 = np.prod(filter_in_sizes) # Initializes the input tensor with array containing incrementing # numbers from 1. data_array = [f * 1.0 for f in range(1, total_size_1 + 1)] filter_array = [f * 1.0 for f in range(1, total_size_2 + 1)] with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=tensor_in_sizes, dtype="float32") in_filter = constant_op.constant(filter_array, shape=filter_in_sizes, dtype="float32") if data_format == "NDHWC": strides = [1] + strides + [1] dilations = [1] + dilations + [1] else: strides = [1, 1] + strides dilations = [1, 1] + dilations if opname == "conv": nn_ops.conv3d( in_data, in_filter, strides=strides, dilations=dilations, padding=padding, data_format=data_format, ) compare_tf_with_tvm( np.reshape(data_array, tensor_in_sizes).astype("float32"), "Placeholder:0", "Conv3D:0", cuda_layout="NCDHW", add_shapes_to_graph_def=add_shapes_to_graph_def, ) @tvm.testing.uses_gpu def test_forward_convolution3d(): if is_gpu_available(): _test_convolution3d( "conv", [4, 176, 8, 8, 8], [1, 1, 1, 176, 32], [1, 1, 1], [1, 1, 1], "SAME", "NCDHW" ) _test_convolution3d( "conv", [4, 19, 17, 17, 17], [3, 3, 3, 19, 19], [1, 1, 1], [2, 2, 2], "VALID", "NCDHW" ) _test_convolution3d( "conv", [4, 124, 17, 17, 17], [1, 1, 1, 124, 19], [1, 1, 1], [1, 1, 1], "SAME", "NCDHW" ) _test_convolution3d( "conv", [4, 12, 17, 17, 17], [3, 3, 3, 12, 32], [1, 1, 1], [2, 2, 2], "VALID", "NCDHW" ) _test_convolution3d( "conv", [4, 8, 8, 8, 176], [1, 1, 1, 176, 32], [1, 1, 1], [1, 1, 1], "SAME", "NDHWC" ) _test_convolution3d( "conv", [4, 17, 17, 17, 19], [3, 3, 3, 19, 19], [1, 1, 1], [2, 2, 2], "VALID", "NDHWC" ) _test_convolution3d( "conv", [4, 17, 17, 17, 124], [1, 1, 1, 124, 19], [1, 1, 1], [1, 1, 1], "SAME", "NDHWC" ) _test_convolution3d( "conv", [4, 17, 17, 17, 12], [3, 3, 3, 12, 32], [1, 1, 1], [2, 2, 2], "VALID", "NDHWC" ) # Test without adding shapes to graph def _test_convolution3d( "conv", [4, 17, 17, 17, 12], [3, 3, 3, 12, 32], [1, 1, 1], [2, 2, 2], "VALID", "NDHWC", add_shapes_to_graph_def=False, ) ####################################################################### # Convolution3D Transpose # ----------------------- def _test_convolution3d_transpose( data_shape, filter_shape, strides, padding, output_shape, data_format="NCDHW", add_shapes_to_graph_def=True, ): """One iteration of 3D convolution transpose with given shapes and attributes""" dtype = "float32" data_array = np.random.uniform(size=data_shape).astype(dtype) filter_array = np.random.uniform(size=filter_shape).astype(dtype) if data_format == "NDHWC": strides = [1] + strides + [1] else: strides = [1, 1] + strides with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data_shape, dtype=dtype) in_filter = constant_op.constant(filter_array, shape=filter_shape, dtype=dtype) nn_ops.conv3d_transpose( in_data, in_filter, output_shape=output_shape, strides=strides, padding=padding, 
data_format=data_format, ) compare_tf_with_tvm( data_array, "Placeholder:0", "conv3d_transpose:0", cuda_layout="NDHWC", add_shapes_to_graph_def=add_shapes_to_graph_def, ) @tvm.testing.uses_gpu def test_forward_convolution3d_transpose(): if is_gpu_available(): _test_convolution3d_transpose( data_shape=[1, 10, 8, 8, 8], filter_shape=[1, 1, 1, 6, 10], strides=[1, 1, 1], padding="VALID", output_shape=[1, 6, 8, 8, 8], ) _test_convolution3d_transpose( data_shape=[4, 9, 8, 8, 8], filter_shape=[1, 1, 1, 6, 9], strides=[1, 1, 1], padding="VALID", output_shape=[4, 6, 8, 8, 8], ) _test_convolution3d_transpose( data_shape=[1, 3, 8, 8, 8], filter_shape=[1, 1, 1, 6, 3], strides=[2, 2, 2], padding="SAME", output_shape=[1, 6, 15, 15, 15], ) _test_convolution3d_transpose( data_shape=[1, 16, 8, 8, 8], filter_shape=[3, 3, 3, 6, 16], strides=[3, 3, 3], padding="VALID", output_shape=[1, 6, 24, 24, 24], ) _test_convolution3d_transpose( data_shape=[1, 8, 8, 8, 10], filter_shape=[1, 1, 1, 6, 10], strides=[1, 1, 1], padding="VALID", output_shape=[1, 8, 8, 8, 6], data_format="NDHWC", ) _test_convolution3d_transpose( data_shape=[4, 8, 8, 8, 9], filter_shape=[1, 1, 1, 6, 9], strides=[1, 1, 1], padding="VALID", output_shape=[4, 8, 8, 8, 6], data_format="NDHWC", ) _test_convolution3d_transpose( data_shape=[1, 8, 8, 8, 3], filter_shape=[1, 1, 1, 6, 3], strides=[2, 2, 2], padding="SAME", output_shape=[1, 15, 15, 15, 6], data_format="NDHWC", ) _test_convolution3d_transpose( data_shape=[1, 8, 8, 8, 16], filter_shape=[3, 3, 3, 6, 16], strides=[3, 3, 3], padding="VALID", output_shape=[1, 24, 24, 24, 6], data_format="NDHWC", ) # Test without adding shapes to graph def _test_convolution3d_transpose( data_shape=[1, 8, 8, 8, 16], filter_shape=[3, 3, 3, 6, 16], strides=[3, 3, 3], padding="VALID", output_shape=[1, 24, 24, 24, 6], data_format="NDHWC", add_shapes_to_graph_def=False, ) ####################################################################### # BiasAdd # ----------- def _test_biasadd(tensor_in_sizes, data_format): """One iteration of biasadd with given shapes and attributes""" total_size_1 = 1 for s in tensor_in_sizes: total_size_1 *= s tensor_bias_sizes = [tensor_in_sizes[1]] if data_format == "NCHW" else [tensor_in_sizes[3]] total_size_2 = tensor_bias_sizes[0] # Initializes the input tensor with array containing incrementing # numbers from 1. 
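    # (BiasAdd requires the bias length to match the channel dimension:
    # tensor_in_sizes[1] for NCHW, tensor_in_sizes[3] for NHWC, as computed above.)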
    data_array = [f * 1.0 for f in range(1, total_size_1 + 1)]
    bias_array = [f * 1.0 for f in range(1, total_size_2 + 1)]

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=tensor_in_sizes, dtype="float32")
        in_bias = constant_op.constant(bias_array, shape=tensor_bias_sizes, dtype="float32")
        nn_ops.bias_add(in_data, in_bias, data_format=data_format)
        compare_tf_with_tvm(
            np.reshape(data_array, tensor_in_sizes).astype("float32"), "Placeholder:0", "BiasAdd:0"
        )


@tvm.testing.uses_gpu
def test_forward_biasadd():
    if is_gpu_available():
        _test_biasadd([4, 176, 8, 8], "NCHW")
        _test_biasadd([1, 100, 1, 1], "NCHW")
        _test_biasadd([4, 19, 17, 17], "NCHW")
        _test_biasadd([4, 124, 3, 3], "NCHW")

    _test_biasadd([4, 8, 8, 176], "NHWC")
    _test_biasadd([1, 1, 1, 100], "NHWC")
    _test_biasadd([4, 17, 17, 19], "NHWC")
    _test_biasadd([4, 3, 3, 124], "NHWC")


def _test_forward_where(input_shape):
    with tf.Graph().as_default():
        dtype = tf.float32
        t = tf.constant(
            np.random.choice([0, 1, -2, 3, -1, 0.1, -0.2], size=input_shape).astype(dtype.name)
        )
        out = tf.where(t)
        compare_tf_with_tvm([], [], out.name, mode="debug")
        compare_tf_with_tvm([], [], out.name, mode="vm")


def test_forward_argwhere():
    _test_forward_where((5,))
    _test_forward_where((5, 5))
    _test_forward_where((5, 5, 5))
    _test_forward_where((5, 5, 5, 5))
    _test_forward_where((5, 5, 5, 5, 5))


#######################################################################
# SpaceToBatchND
# --------------


def _test_space_to_batch_nd(input_shape, block_shape, paddings, dtype="int32"):
    data = np.random.uniform(0, 5, size=input_shape).astype(dtype)

    with tf.Graph().as_default():
        in_data = tf.placeholder(shape=input_shape, dtype=dtype)
        out = tf.space_to_batch_nd(in_data, block_shape, paddings)
        compare_tf_with_tvm(data, in_data.name, out.name)


def _test_space_to_batch_nd_infer_paddings(input_shape, block_shape, dtype="int32"):
    data = np.random.uniform(0, 5, size=input_shape).astype(dtype)
    padding_np = np.array([0, 1]).astype(np.int32).reshape((1, 2))

    with tf.Graph().as_default():
        in_data = tf.placeholder(shape=input_shape, dtype=dtype)
        const1 = tf.constant(padding_np, dtype=tf.int32)
        # make paddings the output of an op (tf.reverse) rather than an input to
        # the graph, so its value has to be extracted with infer_value_simulated
        paddings = tf.reverse(const1, axis=[-1])
        out = tf.space_to_batch_nd(in_data, block_shape, paddings)
        compare_tf_with_tvm(data, in_data.name, out.name)


def test_forward_space_to_batch_nd():
    # test cases: https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/space-to-batch-n-d
    _test_space_to_batch_nd(input_shape=[1, 2, 2, 1], block_shape=[2, 2], paddings=[[0, 0], [0, 0]])
    _test_space_to_batch_nd(input_shape=[1, 2, 2, 3], block_shape=[2, 2], paddings=[[0, 0], [0, 0]])
    _test_space_to_batch_nd(input_shape=[1, 4, 4, 1], block_shape=[2, 2], paddings=[[0, 0], [0, 0]])
    _test_space_to_batch_nd(
        input_shape=[2, 2, 4, 1], block_shape=[2, 2], paddings=[[0, 0], [2, 0]], dtype="int64"
    )
    # pylint: disable=line-too-long
    # https://github.com/tensorflow/tensorflow/blob/24f578/tensorflow/python/kernel_tests/spacetobatch_op_test.py
    _test_space_to_batch_nd(input_shape=[2, 3], block_shape=[2], paddings=[[1, 0]], dtype="float32")
    _test_space_to_batch_nd(
        input_shape=[2, 3, 2], block_shape=[2], paddings=[[1, 0]], dtype="float64"
    )
    _test_space_to_batch_nd_infer_paddings(input_shape=[2, 3, 2], block_shape=[2])


#######################################################################
# BatchToSpaceND
# --------------


def _test_batch_to_space_nd(input_shape, block_shape, crops, dtype="int32"):
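    """One iteration of batch_to_space_nd with given input shape and attributes"""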
data = np.random.uniform(0, 5, size=input_shape).astype(dtype) with tf.Graph().as_default(): in_data = tf.placeholder(shape=input_shape, dtype=dtype) out = tf.batch_to_space_nd(in_data, block_shape, crops) compare_tf_with_tvm(data, in_data.name, out.name) def test_forward_batch_to_space_nd(): # test cases: https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/batch-to-space-n-d _test_batch_to_space_nd(input_shape=[4, 1, 1, 1], block_shape=[2, 2], crops=[[0, 0], [0, 0]]) _test_batch_to_space_nd(input_shape=[4, 1, 1, 3], block_shape=[2, 2], crops=[[0, 0], [0, 0]]) _test_batch_to_space_nd(input_shape=[4, 2, 2, 1], block_shape=[2, 2], crops=[[0, 0], [0, 0]]) _test_batch_to_space_nd( input_shape=[8, 1, 3, 1], block_shape=[2, 2], crops=[[0, 0], [2, 0]], dtype="int64" ) # pylint: disable=line-too-long # https://github.com/tensorflow/tensorflow/blob/24f578/tensorflow/python/kernel_tests/batchtospace_op_test.py _test_batch_to_space_nd( input_shape=[18, 2, 1, 2], block_shape=[2, 3], crops=[[1, 1], [0, 0]], dtype="float32" ) _test_batch_to_space_nd( input_shape=[20, 5, 8, 7], block_shape=[2, 2], crops=[[1, 1], [1, 1]], dtype="float64" ) ####################################################################### # Reshape # ------- def _test_reshape(data, out_shape): """One iteration of reshape operation with given data and out shape""" with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) array_ops.reshape(in_data, out_shape) compare_tf_with_tvm(data, "Placeholder:0", "Reshape:0") def _test_reshape_with_call(): """relay.expr.Call as shape""" data = np.zeros((6, 4, 2)) with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) out_shape = tf.constant([1, 2, 3], dtype="int32") out_shape = tf.multiply(out_shape, 2) array_ops.reshape(in_data, out_shape) compare_tf_with_tvm(data, "Placeholder:0", "Reshape:0") def _test_reshape_like(data, shape_like): """A special case for reshape.""" with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) in_shape_like = array_ops.placeholder(shape=shape_like.shape, dtype=data.dtype) out_shape = array_ops.shape(in_shape_like) array_ops.reshape(in_data, out_shape) compare_tf_with_tvm(data, "Placeholder:0", "Reshape:0") def _test_reshape_symbolic(data, a_data, b_data): with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) a = array_ops.placeholder(shape=a_data.shape, dtype=a_data.dtype) b = array_ops.placeholder(shape=b_data.shape, dtype=b_data.dtype) newshape = tf.add(a, b) out = array_ops.reshape(in_data, newshape) for mode in ["debug", "vm"]: compare_tf_with_tvm( [data, a_data, b_data], [in_data.name, a.name, b.name], out.name, mode=mode ) def test_forward_reshape(): _test_reshape(np.arange(6.0), [2, 3]) _test_reshape(np.arange(6), [-1, 2]) _test_reshape(np.arange(6), [3, -1]) _test_reshape(np.arange(6), [-1]) _test_reshape_with_call() _test_reshape_like(np.zeros((3, 6)), np.zeros((9, 2))) _test_reshape_symbolic(np.arange(6.0), np.array([2, 0]), np.array([0, 3])) _test_reshape_symbolic(np.arange(6), np.array([-1, 0]), np.array([0, 2])) _test_reshape_symbolic(np.arange(6), np.array([3, 0]), np.array([3, -1])) _test_reshape_symbolic(np.arange(6), np.array([0]), np.array([-1])) ####################################################################### # DepthToSpace # ------------ def _test_depthtospace(data, block_size): """One iteration of depth_to_space operation with given data and block size""" with 
tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) array_ops.depth_to_space(in_data, block_size) compare_tf_with_tvm(data, "Placeholder:0", "DepthToSpace:0") def test_forward_depthtospace(): _test_depthtospace(np.random.normal(size=[1, 32, 32, 4]), 2) _test_depthtospace(np.random.normal(size=[1, 16, 8, 32]), 4) ####################################################################### # SpaceToDepth # ------------ def _test_spacetodepth(data, block_size): """One iteration of space_to_depth operation with given data and block size""" with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) array_ops.space_to_depth(in_data, block_size) compare_tf_with_tvm(data, "Placeholder:0", "SpaceToDepth:0") def test_forward_spacetodepth(): _test_spacetodepth(np.random.normal(size=[1, 32, 32, 4]), 2) _test_spacetodepth(np.random.normal(size=[1, 16, 8, 32]), 4) ####################################################################### # Squeeze # ------- def _test_squeeze(data, squeeze_dims=None): """One iteration of squeeze""" if squeeze_dims is None: squeeze_dims = [] with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) if squeeze_dims: array_ops.squeeze(in_data, squeeze_dims) else: array_ops.squeeze(in_data) compare_tf_with_tvm(data, "Placeholder:0", "Squeeze:0") def test_forward_squeeze(): """Squeeze""" # Nothing to squeeze. _test_squeeze(np.arange(2).reshape((2))) _test_squeeze(np.arange(6).reshape((2, 3))) # Squeeze the middle element away. _test_squeeze(np.arange(4).reshape((2, 1, 2))) # Squeeze on both ends. _test_squeeze(np.arange(6).reshape((1, 2, 1, 3, 1))) # Positive squeeze dim index. _test_squeeze(np.arange(6).reshape((1, 2, 1, 3, 1)), [0]) _test_squeeze(np.arange(6).reshape((1, 2, 1, 3, 1)), [2, 4]) _test_squeeze(np.arange(6).reshape((1, 2, 1, 3, 1)), [0, 4, 2]) # Negative squeeze dim index. 
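    # (For the (1, 2, 1, 3, 1) input, axes -1, -3, and -5 address the same three
    # size-1 dimensions as positive axes 4, 2, and 0 above.)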
_test_squeeze(np.arange(6).reshape((1, 2, 1, 3, 1)), [-1]) _test_squeeze(np.arange(6).reshape((1, 2, 1, 3, 1)), [-3, -5]) _test_squeeze(np.arange(6).reshape((1, 2, 1, 3, 1)), [-3, -5, -1]) ####################################################################### # TensorArray # ----------- def test_tensor_array_write_read(): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") def run(dtype_str, infer_shape, element_shape): with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] np_data = np.array([[1.0, 2.0], [3.0, 4.0]]).astype(dtype_str) in_data = [np_data, np_data] t1 = tf.constant(np_data, dtype=dtype) t2 = tf.constant(np_data, dtype=dtype) ta1 = tf.TensorArray( dtype=dtype, size=2, infer_shape=infer_shape, element_shape=element_shape ) ta2 = ta1.write(0, t1) ta3 = ta2.write(1, t2) out = ta3.read(0) g = tf.get_default_graph() compare_tf_with_tvm([], [], "TensorArrayReadV3:0", mode="vm") for dtype in ["float32", "int8"]: run(dtype, False, None) run(dtype, False, tf.TensorShape([None, 2])) run(dtype, True, None) def test_tensor_array_scatter(): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") def run(dtype_str, infer_shape): with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] if infer_shape: element_shape = tf.TensorShape([tf.Dimension(None)]) else: element_shape = None t = tf.constant(np.array([[1.0], [2.0], [3.0]]).astype(dtype_str), dtype=dtype) indices = tf.constant([2, 1, 0]) ta1 = tf.TensorArray( dtype=dtype, size=3, infer_shape=infer_shape, element_shape=element_shape ) ta2 = ta1.scatter(indices, t) out0 = ta2.read(0) out1 = ta2.read(1) out2 = ta2.read(2) g = tf.get_default_graph() compare_tf_with_tvm([], [], ["TensorArrayReadV3:0"], mode="vm") compare_tf_with_tvm([], [], ["TensorArrayReadV3_1:0"], mode="vm") compare_tf_with_tvm([], [], ["TensorArrayReadV3_2:0"], mode="vm") for dtype in ["float32", "int8"]: run(dtype, False) run(dtype, True) def test_tensor_array_gather(): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") def run(dtype_str, infer_shape): with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] t = tf.constant(np.array([[1.0], [2.0], [3.0]]).astype(dtype_str)) scatter_indices = tf.constant([2, 1, 0]) gather_indices = tf.constant([1, 2]) ta1 = tf.TensorArray(dtype=dtype, size=3, infer_shape=infer_shape) ta2 = ta1.scatter(scatter_indices, t) t1 = ta2.gather(gather_indices) g = tf.get_default_graph() compare_tf_with_tvm([], [], ["TensorArrayGatherV3:0"], mode="vm") for dtype in ["float32", "int8"]: run(dtype, True) def test_tensor_array_split(): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") def run(dtype_str, infer_shape): with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] t = tf.constant( np.array([[1.0], [2.0], [3.0], [4.0], [5.0], [6.0], [7.0], [8.0]]).astype( dtype_str ), dtype=dtype, ) split_length = tf.constant([2, 2, 2, 2], dtype=tf.int32) ta1 = tf.TensorArray(dtype=dtype, size=4, infer_shape=infer_shape) ta2 = ta1.split(t, split_length) out0 = ta2.read(0) out1 = ta2.read(1) out2 = ta2.read(2) out3 = ta2.read(3) g = tf.get_default_graph() compare_tf_with_tvm([], [], ["TensorArrayReadV3:0"], mode="debug") compare_tf_with_tvm([], [], ["TensorArrayReadV3_1:0"], mode="debug") compare_tf_with_tvm([], [], ["TensorArrayReadV3_2:0"], mode="debug") 
compare_tf_with_tvm([], [], ["TensorArrayReadV3_3:0"], mode="debug") for dtype in ["float32", "int8"]: run(dtype, False) run(dtype, True) def test_tensor_array_concat(): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") def run(dtype_str, infer_shape): with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] t = tf.constant( np.array([[1.0], [2.0], [3.0], [4.0], [5.0], [6.0], [7.0], [8.0]]).astype( dtype_str ), dtype=dtype, ) split_length = tf.constant([2, 2, 2, 2], dtype=tf.int32) ta1 = tf.TensorArray(dtype=dtype, size=4, infer_shape=infer_shape) ta2 = ta1.split(t, split_length) t = ta2.concat() out = tf.identity(t) compare_tf_with_tvm([], [], ["Identity:0"], mode="debug") for dtype in ["float32", "int8"]: run(dtype, False) run(dtype, True) def test_tensor_array_size(): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") def run(dtype_str, infer_shape): with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] np_data = np.array([[1.0, 2.0], [3.0, 4.0]]).astype(dtype_str) in_data = [np_data, np_data] t1 = tf.constant(np_data, dtype=dtype) t2 = tf.constant(np_data, dtype=dtype) ta1 = tf.TensorArray(dtype=dtype, size=2, infer_shape=infer_shape) ta2 = ta1.write(0, t1) ta3 = ta2.write(1, t2) out = ta3.size() g = tf.get_default_graph() compare_tf_with_tvm([], [], "TensorArraySizeV3:0", mode="debug") for dtype in ["float32", "int8"]: run(dtype, False) run(dtype, True) def test_tensor_array_stack(): def run(dtype_str, infer_shape): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] t = tf.constant(np.array([[1.0], [2.0], [3.0]]).astype(dtype_str)) scatter_indices = tf.constant([2, 1, 0]) ta1 = tf.TensorArray(dtype=dtype, size=3, infer_shape=infer_shape) ta2 = ta1.scatter(scatter_indices, t) t1 = ta2.stack() print(t1) g = tf.get_default_graph() compare_tf_with_tvm([], [], ["TensorArrayStack/TensorArrayGatherV3:0"], mode="vm") for dtype in ["float32", "int8"]: run(dtype, True) def test_tensor_array_unstack(): def run(dtype_str, input_shape, infer_shape): if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): pytest.skip("Needs fixing for tflite >= 1.15.0") with tf.Graph().as_default(): dtype = tf_dtypes[dtype_str] t = tf.constant(np.random.choice([0, 1, 2, 3], size=input_shape).astype(dtype.name)) ta1 = tf.TensorArray(dtype=dtype, infer_shape=infer_shape, size=input_shape[0]) ta2 = ta1.unstack(t) out0 = ta2.size() out1 = ta2.read(0) compare_tf_with_tvm([], [], "TensorArraySizeV3:0", mode="debug") compare_tf_with_tvm([], [], "TensorArrayReadV3:0", mode="debug") for dtype in ["float32", "int8"]: run(dtype, (5,), False) run(dtype, (5, 5), True) run(dtype, (5, 5, 5), False) run(dtype, (5, 5, 5, 5), True) ####################################################################### # ConcatV2 # -------- def _test_concat_v2(shape1, shape2, dim): """One iteration of ConcatV2""" with tf.Graph().as_default(): dtype = "float32" in1 = tf.placeholder(shape=shape1, dtype=dtype, name="in1") in2 = tf.placeholder(shape=shape2, dtype=dtype, name="in2") array_ops.concat_v2([in1, in2], dim) np_data1 = np.random.uniform(size=shape1).astype(dtype) np_data2 = np.random.uniform(size=shape2).astype(dtype) compare_tf_with_tvm([np_data1, np_data2], ["in1:0", "in2:0"], "ConcatV2:0") def test_forward_concat_v2(): if tf.__version__ < 
LooseVersion("1.4.1"): return _test_concat_v2([2, 3], [2, 3], 0) _test_concat_v2([10, 3, 5], [2, 3, 5], 0) _test_concat_v2([2, 3], [2, 3], 1) _test_concat_v2([5, 8], [5, 4], 1) _test_concat_v2([2, 8, 5], [2, 8, 6], -1) ####################################################################### # Sigmoid # ------- def _test_sigmoid(data): """One iteration of sigmoid""" with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype) sigmoid_out = math_ops.sigmoid(in_data) compare_tf_with_tvm(data, "Placeholder:0", "Sigmoid:0") def test_forward_sigmoid(): """Sigmoid""" _test_sigmoid(np.random.uniform(size=(3, 4, 4, 3)).astype("float32")) ####################################################################### # Argmin/Argmax # ------------- def _test_argx(func, data, **kwargs): with tf.Graph().as_default(): inp = array_ops.placeholder(shape=data.shape, dtype=data.dtype, name="c0") func(inp, name="argx0", **kwargs) compare_tf_with_tvm(data, "c0:0", "argx0:0") def test_forward_argminmax(): for output_type in [tf.int64, tf.int32]: for axis in [None, 0, 1, 2]: data = np.random.uniform(size=(8, 4, 9)).astype("float32") _test_argx(tf.argmax, data=data, axis=axis, output_type=output_type) _test_argx(tf.argmin, data=data, axis=axis, output_type=output_type) ####################################################################### # Variable # -------- def _test_variable(data): """One iteration of a variable""" tf.reset_default_graph() with tf.Graph().as_default(): input_op = array_ops.placeholder(shape=data.shape, dtype=data.dtype) input_tensor = array_ops.reshape(input_op, data.shape) size = input_tensor.shape.dims[1] with variable_scope.variable_scope("linear", reuse=None): w = variable_scope.get_variable("w", shape=[size, size], dtype=input_tensor.dtype) math_ops.matmul(input_tensor, w) compare_tf_with_tvm(data, "Placeholder:0", "MatMul:0", init_global_variables=True) def test_forward_variable(): """Variable type op test""" _test_variable(np.random.uniform(size=(32, 100)).astype("float32")) @tvm.testing.parametrize_targets("llvm", "cuda") def test_read_variable_op(target, dev): """Read Variable op test""" tf.reset_default_graph() data = np.random.uniform(size=(32, 100)).astype("float32") input_tensor = array_ops.placeholder(shape=data.shape, dtype=data.dtype) size = input_tensor.shape.dims[1] var_data = np.random.uniform(-5, 5, size=[size, size]).astype(np.float32) input_var = tf.Variable(var_data, name="var1", use_resource=True) math_ops.matmul(input_tensor, input_var) out_name = ["MatMul:0"] out_node = ["MatMul"] in_name = ["Placeholder:0"] in_node = ["Placeholder"] in_data = [data] with tf.Session() as sess: sess.run(variables.global_variables_initializer()) final_graph_def = sess.graph.as_graph_def(add_shapes=True) tf_output = run_tf_graph(sess, in_data, in_name, out_name) shape_dict = {e: i.shape for e, i in zip(in_name, in_data)} with pytest.raises(Exception) as execinfo: mod, params = relay.frontend.from_tensorflow( final_graph_def, layout=None, shape=shape_dict, outputs=None ) assert execinfo.value.args[0].startswith("Graph is not frozen. 
Provide a frozen graph") # Now convert the variables to constant and run inference on the converted graph final_graph_def = tf.graph_util.convert_variables_to_constants( sess, sess.graph.as_graph_def(add_shapes=True), out_node, ) tvm_output = run_tvm_graph( final_graph_def, in_data, in_node, target=target, out_names=out_name, num_output=len(out_name), ) for i in range(len(tf_output)): tvm.testing.assert_allclose(tf_output[i], tvm_output[i], atol=1e-4, rtol=1e-5) sess.close() ####################################################################### # MatMul, BatchMatMul, BatchMatMulV2 # ---------------------------------- def _test_matmul(i, j, k, dtype, outer=None): """One iteration of matmul""" A_shape_init = [i, j] B_shape_init = [j, k] for transpose_a in [False, True]: for transpose_b in [False, True]: outer = outer or [] A_shape = outer + (A_shape_init[::-1] if transpose_a else A_shape_init) B_shape = outer + (B_shape_init[::-1] if transpose_b else B_shape_init) with tf.Graph().as_default(): A = tf.placeholder(shape=A_shape, dtype=dtype, name="A") B = tf.placeholder(shape=B_shape, dtype=dtype, name="B") result = tf.matmul(A, B, transpose_a=transpose_a, transpose_b=transpose_b) A_np = np.random.uniform(high=5.0, size=A_shape).astype(dtype) B_np = np.random.uniform(high=5.0, size=B_shape).astype(dtype) compare_tf_with_tvm([A_np, B_np], [A.name, B.name], result.name) def test_forward_matmul(): """MatMul op test""" _test_matmul(1, 3, 6, "int32") _test_matmul(5, 3, 1, "float64") def _test_batch_matmul(A_shape, B_shape, dtype, adjoint_a=False, adjoint_b=False): with tf.Graph().as_default(): A = tf.placeholder(shape=A_shape, dtype=dtype, name="A") B = tf.placeholder(shape=B_shape, dtype=dtype, name="B") result = tf.matmul(A, B, adjoint_a=adjoint_a, adjoint_b=adjoint_b, name="batchmatmul") A_np = np.random.uniform(high=5.0, size=A_shape).astype(dtype) B_np = np.random.uniform(high=5.0, size=B_shape).astype(dtype) compare_tf_with_tvm([A_np, B_np], [A.name, B.name], result.name) def _test_batch_matmul_dynamic( A_shape, B_shape, A_np_shape, B_np_shape, dtype, adjoint_a=False, adjoint_b=False ): with tf.Graph().as_default(): A = tf.placeholder(shape=A_shape, dtype=dtype, name="A") B = tf.placeholder(shape=B_shape, dtype=dtype, name="B") result = tf.matmul(A, B, adjoint_a=adjoint_a, adjoint_b=adjoint_b, name="batchmatmul") A_np = np.random.uniform(high=5.0, size=A_np_shape).astype(dtype) B_np = np.random.uniform(high=5.0, size=B_np_shape).astype(dtype) # for now, in TOPI, only cublas's implementation support dynamic shape # TODO add more backends support in TOPI compare_tf_with_tvm( [A_np, B_np], [A.name, B.name], result.name, mode="vm", targets=["cuda -libs=cublas"] ) def test_forward_batch_matmul(): """TF op BatchMatMul, BatchMatMulV2 test""" _test_batch_matmul((3, 5, 4), (3, 4, 5), "int32") _test_batch_matmul((3, 5, 4), (3, 4, 5), "float32", True, True) _test_batch_matmul((3, 5, 4), (3, 5, 4), "int32", True, False) _test_batch_matmul((3, 5, 4), (3, 5, 4), "float32", False, True) _test_batch_matmul((2, 3, 4, 5, 6), (2, 3, 4, 6, 5), "int32") _test_batch_matmul((1, 2, 3, 4, 5, 6), (1, 2, 3, 4, 6, 5), "float32", True, True) _test_batch_matmul((3, 4, 5, 6), (3, 4, 5, 6), "int32", True, False) _test_batch_matmul((2, 3, 4, 2, 3, 4, 5, 6), (2, 3, 4, 2, 3, 4, 5, 6), "float32", False, True) _test_batch_matmul((1, 8, 64, 2), (2, 1), "float32", False, False) _test_batch_matmul((1, 8, 8, 64), (64, 1), "float32", False, False) _test_batch_matmul((1, 8, 64), (64, 1), "float32", False, False) 
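# Note on the adjoint flags exercised above: for real-valued inputs, tf.matmul's
# adjoint_a/adjoint_b simply transpose the last two dimensions of the operand
# (tf.matmul(A, B, adjoint_a=True) equals np.matmul(np.swapaxes(A, -1, -2), B)),
# so the shapes above are chosen so the inner dimensions line up after transposition.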
@tvm.testing.requires_cuda def test_forward_batch_matmul_dynamic(): _test_batch_matmul_dynamic((None, 5, 4), (None, 4, 5), (3, 5, 4), (3, 4, 5), "int32") _test_batch_matmul_dynamic( (None, 5, 4), (None, 4, 5), (3, 5, 4), (3, 4, 5), "float32", True, True ) _test_batch_matmul_dynamic( (None, 5, 4), (None, 5, 4), (3, 5, 4), (3, 5, 4), "int32", True, False ) _test_batch_matmul_dynamic( (None, 5, 4), (None, 5, 4), (3, 5, 4), (3, 5, 4), "float32", False, True ) _test_batch_matmul_dynamic( (None, 4, 5, 6), (None, 4, 6, 5), (3, 4, 5, 6), (3, 4, 6, 5), "float32" ) _test_batch_matmul_dynamic( (None, None, 5, 6), (None, None, 6, 5), (3, 4, 5, 6), (3, 4, 6, 5), "float32" ) _test_batch_matmul_dynamic( (None, None, None, 5, 6), (None, None, None, 6, 5), (2, 3, 4, 5, 6), (2, 3, 4, 6, 5), "float32", ) _test_batch_matmul_dynamic( (None, None, None, 5, 6), (6, None), (2, 3, 4, 5, 6), (6, 1), "float32", ) _test_batch_matmul_dynamic( (None, 5, 6), (6, None), (24, 5, 6), (6, 1), "float32", ) ####################################################################### # SparseTensorDenseMatMul # ---------------------------------- def _test_sparse_dense_matmul(indices, values, A_inp_shape, B_inp_shape, dtype, flip=False): """One iteration of sparse_dense_matmul""" for adjoint_a in [False, True]: for adjoint_b in [False, True]: A_shape = A_inp_shape[::-1] if adjoint_a else A_inp_shape B_shape = B_inp_shape[::-1] if adjoint_b else B_inp_shape with tf.Graph().as_default(): A_sp = tf.sparse.SparseTensor(indices=indices, values=values, dense_shape=A_shape) B = tf.placeholder(shape=B_shape, dtype=dtype, name="B") if flip: result = tf.sparse.sparse_dense_matmul( B, A_sp, adjoint_a=adjoint_b, adjoint_b=adjoint_a ) else: result = tf.sparse.sparse_dense_matmul( A_sp, B, adjoint_a=adjoint_a, adjoint_b=adjoint_b ) B_np = np.random.uniform(high=5.0, size=B_shape).astype(dtype) compare_tf_with_tvm([B_np], [B.name], result.name) def test_forward_sparse_dense_matmul(): """sparse_dense_matmul op test""" ################################################################### # # In order to create a SparseTensor, it requires 3 input as below: # SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4]) # # Above Sparse can be represented in Dense as below : # [[1, 0, 0, 0] # [0, 0, 2, 0] # [0, 0, 0, 0]] # # ------------------------------------------------------------------ _test_sparse_dense_matmul([[0, 0], [1, 2]], [4.0, 8.0], [3, 4], [4, 3], "float32") _test_sparse_dense_matmul([[0, 0], [1, 2]], [4.0, 8.0], [3, 3], [3, 3], "float32") _test_sparse_dense_matmul([[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [5, 5], [5, 5], "float32") _test_sparse_dense_matmul([[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [7, 9], [9, 5], "float32") _test_sparse_dense_matmul([[0, 0], [1, 2]], [4.0, 8.0], [4, 3], [3, 4], "float32", True) _test_sparse_dense_matmul([[0, 0], [1, 2]], [4.0, 8.0], [3, 3], [3, 3], "float32", True) _test_sparse_dense_matmul( [[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [5, 5], [5, 5], "float32", True ) _test_sparse_dense_matmul( [[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [9, 5], [7, 9], "float32", True ) ####################################################################### # SparseFillEmptyRows # ------------ def _test_sparse_fill_empty_rows(indices_np, values_np, dense_shape_np, default_value_int, use_dyn): with tf.Graph().as_default(): if use_dyn: indices = tf.placeholder(shape=(None, None), dtype=indices_np.dtype, name="indices") values = tf.placeholder(shape=(None), dtype=values_np.dtype, name="values") dense_shape = 
tf.placeholder( shape=(None), dtype=dense_shape_np.dtype, name="dense_shape" ) else: indices = tf.placeholder(shape=indices_np.shape, dtype=indices_np.dtype, name="indices") values = tf.placeholder(shape=values_np.shape, dtype=values_np.dtype, name="values") dense_shape = tf.placeholder( shape=dense_shape_np.shape, dtype=dense_shape_np.dtype, name="dense_shape" ) default_value = tf.placeholder(shape=(), dtype=values_np.dtype, name="default_value") sp_input = tf.sparse.SparseTensor(indices=indices, values=values, dense_shape=dense_shape) _ = tf.sparse.fill_empty_rows(sp_input, default_value, name="sparse_fill_empty_rows") compare_tf_with_tvm( [indices_np, values_np, dense_shape_np, default_value_int], [indices.name, values.name, dense_shape.name, default_value.name], [ "sparse_fill_empty_rows/SparseFillEmptyRows:0", "sparse_fill_empty_rows/SparseFillEmptyRows:1", "sparse_fill_empty_rows/SparseFillEmptyRows:2", ], mode="vm", ) @pytest.mark.parametrize( "sparse_indices_np, sparse_values_np, dense_shape_np, default_value_int", [ ( np.array([[1, 1], [0, 3], [0, 1], [2, 0], [3, 1]], dtype=np.int64), np.array([1, 2, 3, 4, 5], dtype=np.int64), np.array([5, 6], dtype=np.int64), 10, ), ( np.array([[1, 1], [0, 3], [2, 0], [3, 1]], dtype=np.int64), np.array([1, 2, 3, 4], dtype=np.int64), np.array([5, 6], dtype=np.int64), 10, ), ( np.array([[0, 1], [0, 3], [2, 0], [3, 1]], dtype=np.int64), np.array([1, 2, 3, 4], dtype=np.int64), np.array([5, 6], dtype=np.int64), 10, ), ( np.array([[1, 1, 1], [1, 3, 1], [2, 0, 5], [3, 1, 6]], dtype=np.int64), np.array([1, 2, 3, 4], dtype=np.int64), np.array([7, 7, 7], dtype=np.int64), 5, ), ( np.array([[1], [2]], dtype=np.int64), np.array([7, 8], dtype=np.int64), np.array([5], dtype=np.int64), 4, ), ( np.ones((0, 1), dtype=np.int64), np.array([], dtype=np.int64), np.array([5], dtype=np.int64), 4, ), ( np.ones((0, 3), dtype=np.int64), np.array([], dtype=np.int64), np.array([9, 3, 7], dtype=np.int64), 100, ), ], ) @pytest.mark.parametrize("use_dyn", [True, False]) def test_forward_sparse_fill_empty_rows( sparse_indices_np, sparse_values_np, dense_shape_np, default_value_int, use_dyn ): """sparse_fill_empty_rows op test""" ################################################################### # # In order to create a SparseTensor, it requires 3 input as below: # SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4]) # # Above Sparse can be represented in Dense as below : # [[1, 0, 0, 0] # [0, 0, 2, 0] # [0, 0, 0, 0]] # # ------------------------------------------------------------------ _test_sparse_fill_empty_rows( sparse_indices_np, sparse_values_np, dense_shape_np, default_value_int, use_dyn ) ####################################################################### # SparseReshape # ------------ def _test_sparse_reshape(indices_np, values_np, prev_shape_np, new_shape_np, use_dyn=False): with tf.Graph().as_default(): if use_dyn: indices = tf.placeholder(shape=(None, None), dtype=indices_np.dtype, name="indices") values = tf.placeholder(shape=(None), dtype=values_np.dtype, name="values") prev_shape = tf.placeholder(shape=(None), dtype=prev_shape_np.dtype, name="prev_shape") new_shape = tf.placeholder(shape=(None), dtype=new_shape_np.dtype, name="new_shape") else: indices = tf.placeholder(shape=indices_np.shape, dtype=indices_np.dtype, name="indices") values = tf.placeholder(shape=values_np.shape, dtype=values_np.dtype, name="values") prev_shape = tf.placeholder( shape=prev_shape_np.shape, dtype=prev_shape_np.dtype, name="prev_shape" ) new_shape = 
tf.placeholder( shape=new_shape_np.shape, dtype=new_shape_np.dtype, name="new_shape" ) sp_input = tf.sparse.SparseTensor(indices=indices, values=values, dense_shape=prev_shape) _ = tf.sparse.reshape(sp_input, new_shape, name="sparse_reshape") compare_tf_with_tvm( [indices_np, values_np, prev_shape_np, new_shape_np], [indices.name, values.name, prev_shape.name, new_shape.name], ["sparse_reshape:0", "sparse_reshape:1", "sparse_reshape/Identity:0"], mode="vm", ) @pytest.mark.parametrize( "sparse_indices_np, sparse_values_np, prev_shape_np, new_shape_np", [ ( np.ones((0, 1), dtype=np.int64), np.array([], dtype=np.int64), np.array([4], dtype=np.int64), np.array([2, -1], dtype=np.int64), ), ( np.ones((0, 1), dtype=np.int64), np.array([], dtype=np.int64), np.array([4], dtype=np.int64), np.array([2, 2], dtype=np.int64), ), ( np.ones((0, 2), dtype=np.int64), np.array([], dtype=np.int64), np.array([3, 6], dtype=np.int64), np.array([-1, 2], dtype=np.int64), ), ( np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 2, 3]], dtype=np.int64), np.array([7, 5, 6, 3, 9], dtype=np.int64), np.array([2, 3, 6], dtype=np.int64), np.array([-1, 9], dtype=np.int64), ), ( np.array( [ [0, 0, 0, 0, 0], [0, 0, 1, 2, 3], [0, 1, 0, 3, 5], [1, 0, 0, 4, 6], [1, 2, 3, 6, 8], ], dtype=np.int64, ), np.array([7, 5, 6, 3, 9], dtype=np.int64), np.array([2, 3, 6, 7, 9], dtype=np.int64), np.array([9, -1, 7], dtype=np.int64), ), ( np.array([[0, 0], [0, 1], [3, 4], [4, 3], [7, 3]], dtype=np.int64), np.array([7, 5, 6, 3, 9], dtype=np.int64), np.array([9, 4], dtype=np.int64), np.array([-1], dtype=np.int64), ), ( np.array([[0], [5], [10], [20], [24]], dtype=np.int64), np.array([7, 5, 6, 3, 9], dtype=np.int64), np.array([25], dtype=np.int64), np.array([5, 5], dtype=np.int64), ), ( np.array([[0, 100], [200, 100], [300, 400], [50, 20], [400, 50]], dtype=np.int64), np.array([7, 5, 6, 3, 9], dtype=np.int64), np.array([500, 20], dtype=np.int64), np.array([500, 20], dtype=np.int64), ), ( np.array([[0, 100], [200, 100], [300, 400], [50, 20], [400, 50]], dtype=np.int64), np.array([7, 5, 6, 3, 9], dtype=np.int64), np.array([500, 20], dtype=np.int64), np.array([500, -1], dtype=np.int64), ), ( np.array([[0, 100], [200, 100], [300, 400], [50, 20], [400, 50]], dtype=np.int64), np.array([7, 5, 6, 3, 9], dtype=np.int64), np.array([500, 20], dtype=np.int64), np.array([250, 40], dtype=np.int64), ), ], ) @pytest.mark.parametrize("use_dyn", [True, False]) def test_forward_sparse_reshape( sparse_indices_np, sparse_values_np, prev_shape_np, new_shape_np, use_dyn ): """sparse_reshape op test""" ################################################################### # # In order to create a SparseTensor, it requires 3 input as below: # SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4]) # # Above Sparse can be represented in Dense as below : # [[1, 0, 0, 0] # [0, 0, 2, 0] # [0, 0, 0, 0]] # # ------------------------------------------------------------------ _test_sparse_reshape(sparse_indices_np, sparse_values_np, prev_shape_np, new_shape_np, use_dyn) ####################################################################### # Sparse Segment Variants # ------------ def _test_sparse_segment_variant( tf_op, data_np, indices_np, segment_ids_np, num_segments, use_dyn=False ): with tf.Graph().as_default(): if use_dyn: data = tf.placeholder( shape=[None for _ in data_np.shape], dtype=data_np.dtype, name="data" ) indices = tf.placeholder(shape=[None], dtype=indices_np.dtype, name="indices") segment_ids = tf.placeholder( shape=(None), 
dtype=segment_ids_np.dtype, name="segment_ids" ) else: data = tf.placeholder(shape=data_np.shape, dtype=data_np.dtype, name="data") indices = tf.placeholder(shape=indices_np.shape, dtype=indices_np.dtype, name="indices") segment_ids = tf.placeholder( shape=segment_ids_np.shape, dtype=segment_ids_np.dtype, name="segment_ids" ) _ = tf_op( data, indices, segment_ids, num_segments=num_segments, name="sparse_segment_variant" ) compare_tf_with_tvm( [data_np, indices_np, segment_ids_np], [data.name, indices.name, segment_ids.name], ["sparse_segment_variant:0"], mode="vm", ) @pytest.mark.parametrize( "data_np, indices_np, segment_ids_np, num_segments", [ ( np.array([5, 1, 7, 2, 3, 4], dtype=np.float32), np.array([0, 3, 4], dtype=np.int32), np.array([0, 1, 1], dtype=np.int32), None, ), ( np.array([[1, 2, 3, 4], [-1, -2, -3, -4], [5, 6, 7, 8]], dtype=np.float64), np.array([0, 1], dtype=np.int32), np.array([0, 2], dtype=np.int32), 4, ), ( np.random.random((6, 4, 5)), np.array([0, 2, 4, 3, 1], dtype=np.int32), np.array([0, 0, 1, 5, 5], dtype=np.int32), 100, ), ( np.random.random((6, 4, 5)), np.array([0, 2, 4, 3, 1], dtype=np.int32), np.array([0, 0, 1, 5, 5], dtype=np.int32), None, ), ( np.array([[[1, 7]], [[3, 8]], [[2, 9]]], dtype=np.float64), np.array([0, 1, 2], dtype=np.int32), np.array([0, 0, 1], dtype=np.int32), None, ), ( np.random.random((9, 4, 5, 7)), np.array([0, 1, 2, 3, 4, 5, 6, 7, 8], dtype=np.int32), np.array([0, 0, 1, 3, 5, 6, 7, 7, 8], dtype=np.int32), 9, ), ( np.random.random((9, 4, 5, 7)), np.array([0, 1, 2, 3, 4, 5, 6, 7, 8], dtype=np.int32), np.array([0, 0, 1, 3, 5, 6, 7, 7, 8], dtype=np.int32), None, ), ( np.array([[1, 2, 3, 4], [-1, -2, -3, -4], [5, 6, 7, 8]], dtype=np.float64), np.array([0, 1], dtype=np.int32), np.array([0, 2], dtype=np.int32), None, ), ( np.random.random((9, 4, 5, 7)), np.array([0, 1, 2, 3, 4, 5, 6, 7, 8], dtype=np.int32), np.array([0, 0, 1, 3, 5, 5, 5, 5, 5], dtype=np.int32), 6, ), ], ) @pytest.mark.parametrize("use_dyn", [True, False]) @pytest.mark.parametrize( "tf_op", [ tf.sparse.segment_sum, tf.sparse.segment_sqrt_n, tf.sparse.segment_mean, ], ) def test_forward_sparse_segment_sum_variants( tf_op, data_np, indices_np, segment_ids_np, num_segments, use_dyn, ): """sparse segment sum variants tests""" _test_sparse_segment_variant(tf_op, data_np, indices_np, segment_ids_np, num_segments, use_dyn) ####################################################################### # Math SegmentSum # ------------ def _test_math_segment_sum(data_np, segment_ids_np, use_dyn=False): with tf.Graph().as_default(): if use_dyn: data = tf.placeholder( shape=[None for _ in data_np.shape], dtype=data_np.dtype, name="data" ) segment_ids = tf.placeholder( shape=(None), dtype=segment_ids_np.dtype, name="segment_ids" ) else: data = tf.placeholder(shape=data_np.shape, dtype=data_np.dtype, name="data") segment_ids = tf.placeholder( shape=segment_ids_np.shape, dtype=segment_ids_np.dtype, name="segment_ids" ) _ = tf.math.segment_sum(data, segment_ids, name="segment_sum") compare_tf_with_tvm( [data_np, segment_ids_np], [data.name, segment_ids.name], ["segment_sum:0"], mode="vm", ) @pytest.mark.parametrize( "data_np, segment_ids_np", [ ( np.array([5, 1, 7, 2, 3, 4], dtype=np.float32), np.array([0, 0, 0, 1, 1, 1], dtype=np.int32), ), ( np.array([[1, 2, 3, 4], [-1, -2, -3, -4], [5, 6, 7, 8]], dtype=np.float64), np.array([0, 0, 1], dtype=np.int32), ), ( np.random.random((6, 4, 5)), np.array([0, 0, 1, 2, 2, 3], dtype=np.int64), ), ( np.array([[[1, 7]], [[3, 8]], [[2, 9]]], dtype=np.float32), 
np.array([0, 0, 1], dtype=np.int32), ), ( np.random.random((9, 4, 5, 7)), np.array([0, 0, 0, 1, 2, 3, 4, 4, 5], dtype=np.int64), ), ], ) @pytest.mark.parametrize("use_dyn", [True, False]) def test_forward_math_segment_sum(data_np, segment_ids_np, use_dyn): """math segment sum test""" _test_math_segment_sum(data_np, segment_ids_np, use_dyn) # tensorflow.compat.v1.sparse_to_dense # --------------- def _test_sparse_to_dense(sparse_indices, sparse_values, default_value, output_shape): with tf.Graph().as_default(): indices = tf.placeholder( shape=sparse_indices.shape, dtype=str(sparse_indices.dtype), name="indices" ) values = tf.placeholder( shape=sparse_values.shape, dtype=str(sparse_values.dtype), name="values" ) oshape = tf.constant(output_shape, shape=output_shape.shape, dtype=str(output_shape.dtype)) if default_value == None: output = tf.sparse_to_dense(indices, oshape, values) compare_tf_with_tvm( [sparse_indices, sparse_values], ["indices:0", "values:0"], output.name ) else: dv = tf.placeholder(shape=(), dtype=str(default_value.dtype), name="default_value") output = tf.sparse_to_dense(indices, oshape, values, dv) compare_tf_with_tvm( [sparse_indices, sparse_values, default_value], ["indices:0", "values:0", "default_value:0"], output.name, ) def test_forward_sparse_to_dense(): # scalar _test_sparse_to_dense( sparse_indices=np.int32(1), sparse_values=np.int32(3), default_value=np.int32(0), output_shape=np.array([5]).astype("int32"), ) # vector _test_sparse_to_dense( sparse_indices=np.array([0, 1, 4]).astype("int32"), sparse_values=np.array([3, 3, 3]).astype("int32"), default_value=np.int32(0), output_shape=np.array([5]).astype("int32"), ) # vector nXd _test_sparse_to_dense( sparse_indices=np.array([[0, 0], [1, 2]]).astype("int32"), sparse_values=np.array([1, 2]).astype("int32"), default_value=np.int32(0), output_shape=np.array([3, 4]).astype("int32"), ) _test_sparse_to_dense( sparse_indices=np.array([[0, 0, 0], [1, 2, 3]]).astype("int32"), sparse_values=np.array([1, 2]).astype("int32"), default_value=np.int32(4), output_shape=np.array([2, 3, 4]).astype("int32"), ) # floats _test_sparse_to_dense( sparse_indices=np.array([0, 1, 4]).astype("int32"), sparse_values=np.array([3.1, 3.1, 3.1]).astype("float32"), default_value=np.float32(3.5), output_shape=np.array([5]).astype("int32"), ) # default value not specified _test_sparse_to_dense( sparse_indices=np.array([0, 1, 4]).astype("int32"), sparse_values=np.array([3.1, 3.1, 3.1]).astype("float32"), default_value=None, output_shape=np.array([5]).astype("int32"), ) ####################################################################### # tensorflow.sparse.to_dense # --------------- def _test_sparse_to_dense_v2(indices, values, A_shape, dtype, default_value=None): with tf.Graph().as_default(): A_sp = tf.sparse.SparseTensor(indices=indices, values=values, dense_shape=A_shape) result = tf.sparse.to_dense(A_sp, default_value=default_value) compare_tf_with_tvm([], [], result.name) def test_forward_sparse_to_dense_v2(): _test_sparse_to_dense_v2([[1]], [3.0], [5], "float32") _test_sparse_to_dense_v2([[1]], [3.0], [5], "float32", 0.3) _test_sparse_to_dense_v2([[0, 0], [1, 2]], [4.0, 8.0], [3, 4], "float32") _test_sparse_to_dense_v2([[0, 0], [1, 2]], [4.0, 8.0], [3, 4], "float32", 1.3) _test_sparse_to_dense_v2([[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [5, 5], "float32") _test_sparse_to_dense_v2([[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [5, 5], "float32", 1.9) ####################################################################### # tensorflow.sparse.add # 
----------------------------------


def _test_sparse_add(indices, values, A_shape, B_shape, dtype, flip=False):
    """One iteration of tf.sparse.add"""
    # TODO(ANSHUMAN87): support cuda
    # TODO(ANSHUMAN87): support the case where both inputs are sparse
    with tf.Graph().as_default():
        A_sp = tf.sparse.SparseTensor(
            indices=indices, values=np.array(values).astype(dtype), dense_shape=A_shape
        )
        B = tf.placeholder(shape=B_shape, dtype=dtype, name="B")

        # TODO(ANSHUMAN87): support user-provided threshold values
        if flip:
            result = tf.sparse.add(B, A_sp, threshold=0)
        else:
            result = tf.sparse.add(A_sp, B, threshold=0)

        B_np = np.random.uniform(high=5.0, size=B_shape).astype(dtype)

        compare_tf_with_tvm([B_np], [B.name], result.name, no_gpu=True)


def test_sparse_add():
    """sparse.add op test"""
    ###################################################################
    #
    # Creating a SparseTensor requires 3 inputs, as below:
    # SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4])
    #
    # The sparse tensor above represents the following dense tensor:
    # [[1, 0, 0, 0]
    #  [0, 0, 2, 0]
    #  [0, 0, 0, 0]]
    #
    # ------------------------------------------------------------------
    for dtype_inp in ["float32", "float64", "int32"]:
        _test_sparse_add([[0, 0], [1, 2]], [4.0, 8.0], [3, 4], [3, 4], dtype_inp)
        _test_sparse_add([[0, 0], [1, 2]], [4.0, 8.0], [3, 4], [3, 4], dtype_inp, True)
        _test_sparse_add([[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [5, 5], [5, 5], dtype_inp)
        _test_sparse_add([[0, 0], [1, 3], [4, 3]], [3.0, 6.0, 9.0], [5, 5], [5, 5], dtype_inp, True)


#######################################################################
# StridedSlice
# ------------


def _test_stridedslice(
    ip_shape,
    begin,
    end,
    stride,
    dtype,
    begin_mask=0,
    end_mask=0,
    new_axis_mask=0,
    shrink_axis_mask=0,
    ellipsis_mask=0,
):
    """One iteration of a StridedSlice"""
    tf.reset_default_graph()
    np_data = np.random.uniform(size=ip_shape).astype(dtype)
    with tf.Graph().as_default():
        if len(ip_shape) == 0:
            in_data = tf.constant(np_data, dtype)
        else:
            in_data = tf.placeholder(dtype, ip_shape, name="in_data")
        tf.strided_slice(
            in_data,
            begin,
            end,
            stride,
            begin_mask=begin_mask,
            end_mask=end_mask,
            new_axis_mask=new_axis_mask,
            shrink_axis_mask=shrink_axis_mask,
            ellipsis_mask=ellipsis_mask,
            name="strided_slice",
        )
        if len(ip_shape) == 0:
            compare_tf_with_tvm(None, "", "strided_slice:0")
        else:
            compare_tf_with_tvm(np_data, "in_data:0", "strided_slice:0")


def test_forward_stridedslice():
    """test StridedSlice"""
    _test_stridedslice([], [0], [0], [1], "float32", new_axis_mask=1)
    _test_stridedslice([2], [1], [1], [1], "float32", shrink_axis_mask=1)
    _test_stridedslice([2, 1], [0], [1], [1], "float32", shrink_axis_mask=1)
    _test_stridedslice([2, 3, 4], [0], [1], [1], "float32", shrink_axis_mask=8)
    _test_stridedslice([3, 4, 3], [1, -1, 0], [4, -5, 3], [2, -1, 1], "float32")
    _test_stridedslice([3, 4, 3], [1, 0], [4, 3], [2, 1], "float32", ellipsis_mask=8)
    _test_stridedslice([3, 4, 3], [1, 0], [4, 2], [2, 1], "float32", ellipsis_mask=2)
    _test_stridedslice([3, 4, 5, 3], [1, 0], [4, 2], [2, 1], "float32", ellipsis_mask=2)
    _test_stridedslice([3, 4, 5, 3], [1, 0, 1], [4, 2, 2], [2, 1, 1], "float32", ellipsis_mask=2)
    _test_stridedslice([3, 4, 3], [1, 1, 0], [4, 4, 2], [2, 1, 1], "float32", new_axis_mask=5)
    _test_stridedslice(
        [3, 4, 3], [1, 1, 1], [4, 4, 1], [2, 1, 1], "float32", ellipsis_mask=2, new_axis_mask=4
    )
    _test_stridedslice(
        [6, 4, 5], [1, 1, 1], [6, 3, 4], [2, 1, 1], "float32", ellipsis_mask=2, new_axis_mask=5
    )
    _test_stridedslice(
        [3, 4, 3], [1, 1, 2], [4, 4, 3], [2, 1, 1], "float32",
ellipsis_mask=4, new_axis_mask=2 ) _test_stridedslice( [3, 4, 3], [1, 1, 2], [4, 4, 3], [2, 1, 1], "float32", ellipsis_mask=2, new_axis_mask=3 ) _test_stridedslice( [3, 4, 3], [1, 1, 0], [4, 4, 1], [2, 1, 1], "float32", ellipsis_mask=2, new_axis_mask=3 ) _test_stridedslice( [3, 4, 3], [1, 1, 2], [4, 4, 3], [2, 1, 1], "float32", ellipsis_mask=2, new_axis_mask=2 ) _test_stridedslice((3, 4), [1, 0], [4, 4], [1, 1], "float32", shrink_axis_mask=2) _test_stridedslice( [3, 4, 3], [1, 1, 0], [4, 4, 3], [2, 1, 1], "float32", shrink_axis_mask=2, new_axis_mask=2 ) _test_stridedslice( [3, 4, 3], [1, 1, 0], [4, 4, 3], [2, 1, 1], "float32", shrink_axis_mask=1, new_axis_mask=2 ) _test_stridedslice( [3, 4, 3], [1, 1, 0], [4, 4, 3], [2, 1, 1], "float32", shrink_axis_mask=2, new_axis_mask=1 ) _test_stridedslice( [3, 4, 5, 4, 5, 6], [0, 0], [2, 3], [1, 1], "float32", shrink_axis_mask=5, new_axis_mask=1 ) _test_stridedslice( [3, 4, 5, 4, 5, 6], [0, 0, 1, 2, 1], [2, 3, 4, 5, 3], [1, 1, 2, 2, 1], "float32", shrink_axis_mask=5, new_axis_mask=1, ellipsis_mask=2, begin_mask=8, end_mask=8, ) _test_stridedslice( [3, 4, 5, 4, 5, 6], [0, 0, 1, 2, 1], [2, 3, 4, 5, 3], [1, 1, 2, 2, 1], "float32", shrink_axis_mask=8, new_axis_mask=1, ellipsis_mask=2, begin_mask=5, end_mask=5, ) _test_stridedslice( [3, 4, 5, 4, 5, 6], [0, 0, 1, 2, 1], [2, 3, 4, 5, 3], [1, 1, 2, 2, 1], "float32", shrink_axis_mask=16, new_axis_mask=1, ellipsis_mask=2, begin_mask=5, end_mask=5, ) _test_stridedslice( [3, 4, 5, 4, 5, 6], [1, 2, 0, -3], [4, 5, 3, 3], [2, 2, 1, 1], "float32", shrink_axis_mask=8, new_axis_mask=1, ellipsis_mask=2, begin_mask=5, end_mask=8, ) _test_stridedslice( [1, 13, 13, 3, 2], [0, 0], [1, 1], [1, -1], "float32", ellipsis_mask=1, begin_mask=2, end_mask=2, ) ####################################################################### # FloorDiv, RealDiv # ----------------- def _test_forward_divide(ip_shape, dtype): np_numer = np.random.uniform(-100, 100, size=ip_shape).astype(dtype) np_denomin = np.random.uniform(1, 100, size=ip_shape).astype(dtype) tf.reset_default_graph() with tf.Graph().as_default(): numerator = tf.placeholder(dtype, ip_shape, name="numer") denominator = tf.placeholder(dtype, ip_shape, name="denomin") tf.math.divide(numerator, denominator, name="RealDiv") compare_tf_with_tvm([np_numer, np_denomin], ["numer:0", "denomin:0"], "RealDiv:0") def _test_forward_floordiv(ip_shape, dtype): np_numer = np.random.uniform(1, 100, size=ip_shape).astype(dtype) tf.reset_default_graph() with tf.Graph().as_default(): numerator = tf.placeholder(dtype, ip_shape, name="numer") tf.math.floordiv(numerator, tf.constant(5, dtype=dtype), name="FloorDiv") compare_tf_with_tvm([np_numer], ["numer:0"], "FloorDiv:0") def test_forward_divide(): """test FloorDiv, RealDiv""" _test_forward_divide((4,), "int32") _test_forward_divide((4, 3, 7), "float32") _test_forward_floordiv((4, 3, 7), "float32") _test_forward_floordiv((4, 3, 7), "int32") ####################################################################### # FloorMod # -------- def _test_forward_floormod(in_shape, if_shape, dtype): np_numer = np.random.uniform(1, 100, size=in_shape).astype(dtype) np_factor = np.random.uniform(1, 100, size=if_shape).astype(dtype) tf.reset_default_graph() with tf.Graph().as_default(): numerator = tf.placeholder(dtype, in_shape, name="numer") factor = tf.placeholder(dtype, if_shape, name="factor") tf.floormod(numerator, factor, name="FloorMod") compare_tf_with_tvm([np_numer, np_factor], ["numer:0", "factor:0"], "FloorMod:0") def test_forward_floormod(): """test 
FloorMod""" _test_forward_floormod((10,), (10,), "float32") _test_forward_floormod((8, 2), (1,), "float32") _test_forward_floormod((4, 3, 7), (4, 3, 7), "float32") _test_forward_floormod((4, 3, 7), (4, 3, 7), "int32") ####################################################################### # TruncateMod # ----------- def _test_forward_truncatemod(ip_shape, dtype): np_data_1 = np.random.uniform(-100, 100, size=ip_shape).astype(dtype) np_data_2 = np.random.uniform(1, 10, size=ip_shape).astype(dtype) tf.reset_default_graph() with tf.Graph().as_default(): in_data_1 = tf.placeholder(dtype, ip_shape, name="in_data_1") in_data_2 = tf.placeholder(dtype, ip_shape, name="in_data_2") tf.truncatemod(in_data_1, in_data_2, name="truncatemod") compare_tf_with_tvm([np_data_1, np_data_2], ["in_data_1:0", "in_data_2:0"], "truncatemod:0") def test_forward_truncatemod(): """test TruncateMod""" _test_forward_truncatemod((4, 3, 7), "int32") ####################################################################### # Gather, GatherV2 # -------------------------- def _test_gather(ip_shape, indice_shape, indice_value, axis, batch_dims, dtype): """One iteration of a GatherV2""" tf.reset_default_graph() with tf.Graph().as_default(): in_data = tf.placeholder(dtype, ip_shape, name="in_data") indices = tf.placeholder("int32", indice_shape, name="indices") out = tf.gather(in_data, indices, axis=axis, batch_dims=batch_dims) np_data = np.random.uniform(1, 10, size=ip_shape).astype(dtype) def _fill_indices(indice_value): indices = np.array(ip_shape, dtype=dtype) if isinstance(indice_value, int): indices = np.array([indice_value], dtype="int32") else: indices = np.asarray(indice_value, dtype="int32") return indices np_indices = _fill_indices(indice_value) compare_tf_with_tvm([np_data, np_indices], ["in_data:0", "indices:0"], out.name) def test_forward_gather(): """test Gather/GatherV2 layer""" _test_gather((4,), (1,), 1, 0, 1, "int32") _test_gather((4,), (1,), 1, 0, 0, "float32") _test_gather((1, 4), (1,), [0], 0, 0, "int32") _test_gather((4,), (1, 2, 2), [[[1, 0], [0, 1]]], 0, 0, "float32") _test_gather((2, 2), (1, 2, 2), [[[1, 0], [0, 1]]], 0, 0, "int32") _test_gather((2, 2), (1, 2, 2), [[[1, 0], [0, 1]]], 1, 0, "int32") _test_gather((2, 2), (1, 2, 2), [[[1, 0], [0, 1]]], 0, 0, "float32") _test_gather((3, 3, 3), (1, 1, 2), [[[1, 0]]], 0, 0, "int32") _test_gather((3, 3, 3), (1, 1, 2), [[[1, 0]]], 2, 0, "int32") _test_gather((4, 3, 5, 6), (1, 4), [[2, 1, 0, 0]], 0, 0, "float32") _test_gather((2, 2), (2, 2), [[0, 0], [0, 0]], 1, 1, "float32") _test_gather( (2, 2, 3, 6), (2, 2, 3), [[[1, 1, 0], [0, 0, 1]], [[0, 1, 0], [1, 0, 1]]], 2, 2, "float32" ) _test_gather( (2, 2, 3, 6), (2, 2, 3), [[[1, 1, 0], [0, 0, 1]], [[0, 1, 0], [1, 0, 1]]], 3, 1, "float32" ) _test_gather( (2, 2, 3, 6), (2, 2, 3), [[[1, 1, 0], [0, 0, 1]], [[0, 1, 0], [1, 0, 1]]], 3, 2, "float32" ) _test_gather( (2, 2, 3, 6), (2, 2, 3), [[[1, 1, 0], [0, 0, 1]], [[0, 1, 0], [1, 0, 1]]], 3, 0, "float32" ) ####################################################################### # GatherND # -------------------------- def _test_gather_nd(ip_shape, indice_value, dtype): """test operator GatherNd""" np_data = np.random.uniform(1, 100, size=ip_shape).astype(dtype) tf.reset_default_graph() with tf.Graph().as_default(): in_data = tf.placeholder(dtype, ip_shape, name="in_data") tf.gather_nd(in_data, indices=indice_value, name="gather_nd") compare_tf_with_tvm([np_data], ["in_data:0"], "gather_nd:0") def test_forward_gather_nd(): """test operator GatherNd""" _test_gather_nd((2, 2), 
[[0, 0], [1, 1]], "float32") _test_gather_nd((2, 2, 2), [[1, 0, 0], [0, 0, 0]], "float32") _test_gather_nd((4,), [1], "float32") _test_gather_nd((4,), [1], "int32") _test_gather_nd((1, 4), [0, 3], "int32") _test_gather_nd((2, 2), [[[1, 0], [0, 1]]], "int32") _test_gather_nd((2, 2), [[[1, 0], [0, 1]]], "float32") _test_gather_nd((3, 3, 3), [[[1, 0]]], "int32") _test_gather_nd((3, 3, 3), [[[1, 0]]], "int32") _test_gather_nd((4, 3, 5, 6), [[2, 1, 0, 0]], "float32") _test_gather_nd((3, 3, 3), [[[2, 1]]], "int32") ####################################################################### # BiasAdd # ------- def test_forward_bias_add(): """test Op BiasAdd""" def check_bias_add(lh_shpae, rh_shape, dtype): tf.reset_default_graph() lh_data = np.random.uniform(size=lh_shpae).astype(dtype) rh_data = np.random.uniform(size=rh_shape).astype(dtype) with tf.Graph().as_default(): lft_data = tf.placeholder(dtype, name="lft_data") rgt_data = tf.placeholder(dtype, name="rgt_data") tf.nn.bias_add(lft_data, rgt_data, name="BiasAdd") compare_tf_with_tvm([lh_data, rh_data], ["lft_data:0", "rgt_data:0"], "BiasAdd:0") check_bias_add((10, 8, 16, 32), (32,), dtype="int32") check_bias_add((10, 20), (20,), dtype="float32") ####################################################################### # Split # ----- def _test_split(in_shape, axis, num_or_size_splits, dtype): np_data = np.random.uniform(-5, 5, size=in_shape).astype(dtype) """ One iteration of a Split """ tf.reset_default_graph() with tf.Graph().as_default(): in_data = tf.placeholder(dtype, in_shape, name="in_data") num_split = ( len(num_or_size_splits) if isinstance(num_or_size_splits, list) else num_or_size_splits ) split = tf.split(in_data, num_or_size_splits, axis=axis) relu = [tf.nn.relu(i) for i in split] compare_tf_with_tvm([np_data], ["in_data:0"], [n.name for n in relu]) # and now test together with concat tf.reset_default_graph() with tf.Graph().as_default(): in_data = tf.placeholder(dtype, in_shape, name="in_data") splitted = tf.split(in_data, num_or_size_splits, axis=axis) concat = tf.concat(splitted, axis) compare_tf_with_tvm([np_data], "in_data:0", concat.name) def test_forward_split(): """test split layer""" # rank 1 _test_split((3,), 0, 1, "float32") _test_split((3,), 0, 3, "float32") _test_split((6,), 0, 3, "float32") # rank 2 _test_split((6, 2), 0, 3, "float32") _test_split((2, 6), 1, 6, "float32") # rank 3 _test_split((6, 2, 4), 0, 2, "int32") _test_split((2, 6, 4), 1, 3, "float32") _test_split((2, 4, 6), 2, 1, "float32") # rank 4 _test_split((6, 1, 3, 5), 0, 3, "float32") _test_split((1, 6, 3, 5), 1, 3, "float32") _test_split((1, 3, 6, 5), 2, 3, "float32") _test_split((1, 3, 5, 6), 3, 3, "float32") # split along negative axis _test_split((6, 1, 3, 5), -4, 3, "float32") _test_split((1, 6, 3, 5), -3, 3, "float32") _test_split((1, 3, 6, 5), -2, 3, "float32") _test_split((1, 3, 5, 6), -1, 3, "float32") # size_splits list _test_split((6,), 0, [1, 2, 3], "int32") _test_split((3, 6, 4), -2, [1, 4, 1], "float32") ###################################################################### # TopKV2 # ------ def _test_forward_top_k_v2(in_shape, k): np_data = np.random.uniform(-100, 100, size=in_shape).astype("float32") tf.reset_default_graph() with tf.Graph().as_default(): in_data = tf.placeholder("float32", in_shape, name="in_data") tf.math.top_k(in_data, k, name="TopK") compare_tf_with_tvm([np_data], ["in_data:0"], "TopK:0") def test_forward_top_k_v2(): _test_forward_top_k_v2((3,), 1) _test_forward_top_k_v2((3,), 3) _test_forward_top_k_v2((3, 5, 7), 3) 
    _test_forward_top_k_v2((3, 5, 7), 3)


#######################################################################
# Unstack
# -------
def _test_unstack(ip_shape, axis, dtype):
    np_data = np.random.uniform(-5, 5, size=ip_shape).astype(dtype)

    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(dtype, ip_shape, name="in_data")
        unstack = tf.unstack(in_data, axis=axis)

        compare_tf_with_tvm([np_data], ["in_data:0"], [n.name for n in unstack])

    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(dtype, ip_shape, name="in_data")
        tf.stack(tf.unstack(in_data, axis=axis), axis=axis)

        compare_tf_with_tvm([np_data], ["in_data:0"], "stack:0")


def test_forward_unstack():
    """test unstack layer"""
    _test_unstack((6,), 0, "int32")
    _test_unstack((2, 6), 1, "float64")
    # negative axis
    _test_unstack((1, 4), -1, "int32")
    _test_unstack((3, 6, 4), -2, "float32")


#######################################################################
# Tile
# ----
def _test_tile(in_shape, multiples, dtype):
    np_data = np.random.uniform(-5, 5, size=in_shape).astype(dtype)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(dtype, in_shape, name="in_data")
        tf.tile(in_data, multiples=multiples, name="tile")
        compare_tf_with_tvm([np_data], ["in_data:0"], "tile:0")


def test_forward_tile():
    """test Tile"""
    _test_tile((2,), (3,), "int32")
    _test_tile((2, 2), (2, 3), "float32")
    _test_tile((2, 4, 6), (6, 7, 8), "float64")


#######################################################################
# ClipByValue
# -----------
def _test_forward_clip_by_value(ip_shape, clip_value_min, clip_value_max, dtype):
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(dtype, ip_shape, name="in_data")
        tf.clip_by_value(in_data, clip_value_min, clip_value_max, name="ClipByValue")
        np_data = np.random.uniform(-100, 100, size=ip_shape).astype(dtype)
        compare_tf_with_tvm([np_data], ["in_data:0"], "ClipByValue:0")


def test_forward_clip_by_value():
    """test ClipByValue op"""
    if tf.__version__ < LooseVersion("1.9"):
        _test_forward_clip_by_value((4,), 0.1, 5.0, "float32")
        _test_forward_clip_by_value((4, 4), 1, 5, "int32")


#######################################################################
# Multi Input to graph
# --------------------
def test_forward_multi_input():
    with tf.Graph().as_default():
        in1 = tf.placeholder(tf.int32, shape=[3, 3], name="in1")
        in2 = tf.placeholder(tf.int32, shape=[3, 3], name="in2")
        in3 = tf.placeholder(tf.int32, shape=[3, 3], name="in3")
        in4 = tf.placeholder(tf.int32, shape=[3, 3], name="in4")

        out1 = tf.add(in1, in2, name="out1")
        out2 = tf.subtract(in3, in4, name="out2")
        out = tf.multiply(out1, out2, name="out")
        in_data = np.arange(9, dtype="int32").reshape([3, 3])

        compare_tf_with_tvm(
            [in_data, in_data, in_data, in_data], ["in1:0", "in2:0", "in3:0", "in4:0"], "out:0"
        )


#######################################################################
# Multi Output to Graph
# ---------------------
def test_forward_multi_output():
    with tf.Graph().as_default():
        in1 = tf.placeholder(tf.int32, shape=[3, 3], name="in1")
        in2 = tf.placeholder(tf.int32, shape=[3, 3], name="in2")
        in3 = tf.placeholder(tf.int32, shape=[3, 3], name="in3")
        in4 = tf.placeholder(tf.int32, shape=[3, 3], name="in4")

        out1 = tf.add(in1, in2, name="out1")
        out2 = tf.subtract(in3, in4, name="out2")
        in_data = np.arange(9, dtype="int32").reshape([3, 3])
        in_data = [in_data] * 4
        in_name = ["in1:0", "in2:0", "in3:0", "in4:0"]
        out_name = ["out1:0", "out2:0"]
        # str.strip(":0") would strip ':' and '0' *characters* from both ends
        # (mangling names such as "out10:0"), so split on ':' instead.
        out_node = [out.split(":")[0] for out in out_name]
        in_node = [inp.split(":")[0] for inp in in_name]
        with tf.Session() as sess:
            final_graph_def = tf.graph_util.convert_variables_to_constants(
                sess,
                sess.graph.as_graph_def(add_shapes=True),
                out_node,
            )
            tf_output = run_tf_graph(sess, in_data, in_name, out_name)
            tvm_output = run_tvm_graph(
                final_graph_def, in_data, in_node, target="llvm", out_names=out_node, num_output=2
            )
            for i in range(len(tf_output)):
                tvm.testing.assert_allclose(tf_output[i], tvm_output[i], atol=1e-5, rtol=1e-5)


#######################################################################
# Resize Bilinear, Nearest_Neighbor
# ---------------------------------
def _test_resize_bilinear(in_shape, to_shape, align_corners):
    """One iteration of resize bilinear"""
    data = np.random.uniform(size=in_shape).astype("float32")
    shape_data = np.array(to_shape).astype("int32")

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype)
        shape_data = constant_op.constant(
            shape_data, shape=shape_data.shape, dtype=shape_data.dtype
        )
        tf.image.resize_bilinear(in_data, shape_data, align_corners=align_corners)

        compare_tf_with_tvm(data, "Placeholder:0", "ResizeBilinear:0")


def _test_resize_bilinear_from_tensor(in_shape, align_corners):
    """One iteration of resize bilinear with non-constant output shape, requires
    value inference to get proper output shape."""
    data = np.random.uniform(size=in_shape).astype("float32")

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(
            shape=[in_shape[0], None, None, in_shape[3]], dtype=data.dtype
        )
        to_shape = tf.shape(in_data)[1:3]
        tf.image.resize_bilinear(in_data, to_shape, align_corners=align_corners)

        compare_tf_with_tvm(data, "Placeholder:0", "ResizeBilinear:0")


def _test_resize_nearest_neighbor(in_shape, to_shape):
    """One iteration of resize nearest neighbor"""
    data = np.random.uniform(size=in_shape).astype("float32")
    shape_data = np.array(to_shape).astype("int32")

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype)
        shape_data = constant_op.constant(
            shape_data, shape=shape_data.shape, dtype=shape_data.dtype
        )
        tf.image.resize_nearest_neighbor(in_data, shape_data, name="resize_nearest_neighbor")

        compare_tf_with_tvm(data, "Placeholder:0", "resize_nearest_neighbor:0")


def _test_resize_nearest_neighbor_dynamic_shape(in_shape, scale):
    """One iteration of resize nearest neighbor for graph with dynamic input shape"""
    data = np.random.uniform(size=in_shape).astype("float32")

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=None, dtype=data.dtype)
        # multiply input shape by scale factor
        new_shape = tf.shape(in_data)[1:3] * tf.constant(scale, dtype=tf.int32)
        tf.image.resize_nearest_neighbor(in_data, new_shape, name="resize_nearest_neighbor")

        compare_tf_with_tvm(data, "Placeholder:0", "resize_nearest_neighbor:0")


def test_forward_resize():
    """Resize Bilinear, Nearest_Neighbor"""
    # TF default layout is NHWC
    _test_resize_bilinear((4, 32, 32, 3), [50, 50], False)
    _test_resize_bilinear((6, 32, 32, 3), [20, 20], True)
    _test_resize_bilinear_from_tensor((4, 32, 32, 3), False)
    _test_resize_bilinear_from_tensor((6, 50, 50, 3), True)
    _test_resize_nearest_neighbor((6, 32, 32, 3), [20, 20])
    _test_resize_nearest_neighbor_dynamic_shape((1, 16, 16, 3), scale=[2, 2])


#######################################################################
# BroadcastArgs
# -----------
def _test_broadcast_args(in_shape_1, in_shape_2):
    """One iteration of broadcast_args"""
    shape_1 = np.array(in_shape_1).astype("int32")
    shape_2 = np.array(in_shape_2).astype("int32")
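    # BroadcastArgs computes the broadcast shape of two shape vectors, e.g.
    # (4, 1, 32, 32) and (4, 8, 32, 32) broadcast to (4, 8, 32, 32).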
    with tf.Graph().as_default():
        shape_1 = constant_op.constant(shape_1, shape=shape_1.shape, dtype=shape_1.dtype)
        shape_2 = constant_op.constant(shape_2, shape=shape_2.shape, dtype=shape_2.dtype)
        tf.raw_ops.BroadcastArgs(s0=shape_1, s1=shape_2)

        compare_tf_with_tvm(None, "", "BroadcastArgs:0", opt_level=0)


def test_forward_broadcast_args():
    """BroadcastArgs"""
    _test_broadcast_args((4, 1, 32, 32), [4, 8, 32, 32])
    _test_broadcast_args((6, 32, 32, 1), [6, 32, 32, 16])
    _test_broadcast_args((32, 32, 16), [6, 32, 32, 16])


#######################################################################
# BroadcastTo
# -----------
def _test_broadcast_to(in_shape, to_shape):
    """One iteration of broadcast_to"""
    data = np.random.uniform(size=in_shape).astype("float32")
    shape_data = np.array(to_shape).astype("int32")

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype)
        shape_data = constant_op.constant(
            shape_data, shape=shape_data.shape, dtype=shape_data.dtype
        )
        tf.broadcast_to(in_data, shape_data)

        compare_tf_with_tvm(data, "Placeholder:0", "BroadcastTo:0", opt_level=0)


def _test_broadcast_to_from_tensor(in_shape):
    """One iteration of broadcast_to with unknown shape at graph build"""
    data = np.random.uniform(size=in_shape).astype("float32")

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=[None], dtype=data.dtype)
        shape_data = tf.multiply(tf.shape(in_data), 32)
        tf.broadcast_to(in_data, shape_data)

        compare_tf_with_tvm(data, "Placeholder:0", "BroadcastTo:0")


def test_forward_broadcast_to():
    """BroadcastTo"""
    _test_broadcast_to((4, 1, 32, 32), [4, 8, 32, 32])
    _test_broadcast_to((6, 32, 32, 1), [6, 32, 32, 16])
    _test_broadcast_to_from_tensor((1,))


#######################################################################
# Fill
# ----
def _test_fill(in_shape):
    """Use the fill op to create a tensor of ones."""
    with tf.Graph().as_default():
        tf.ones(shape=in_shape, dtype="float32")
        compare_tf_with_tvm(in_shape, [], "ones:0", opt_level=1)
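
# Note: when the fill shape is fully constant, TF's optimizer can fold
# tf.ones into a Const before conversion; the tensor-shaped variants below
# keep the shape symbolic so that the Fill op itself reaches the frontend.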


def _test_fill_from_tensor(in_shape):
    """Use the fill op to create a tensor of ones with non-constant shape.
    Some extra ops need to be added here to prevent the graph from being fully
    constant and folded away."""

    data = np.random.uniform(size=in_shape).astype("float32")

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(
            shape=[in_shape[0], in_shape[1], None, None], dtype=data.dtype
        )

        x = tf.ones(shape=2 * tf.shape(in_data), dtype=data.dtype)
        y = tf.math.add(in_data, tf.reduce_mean(x), name="out1")
        compare_tf_with_tvm(data, "Placeholder:0", "out1:0")


def _test_fill_symbolic_inputs(in_shape_data, in_value_data, dtype):
    with tf.Graph().as_default():
        in_shape = tf.placeholder(shape=[in_shape_data.shape[0]], dtype=in_shape_data.dtype)
        in_value = tf.placeholder(shape=(), dtype=dtype)
        out = tf.fill(in_shape, in_value)
        for mode in ["debug", "vm"]:
            compare_tf_with_tvm(
                [in_shape_data, in_value_data], [in_shape.name, in_value.name], out.name, mode=mode
            )


def test_forward_fill():
    """Fill"""
    _test_fill((32,))
    _test_fill((6, 32, 64, 64))
    _test_fill_from_tensor((6, 32, 64, 64))
    _test_fill_symbolic_inputs(np.array((2,)), np.int32(9), tf.int32)
    _test_fill_symbolic_inputs(np.array((2, 3)), 9, tf.int64)
    _test_fill_symbolic_inputs(np.array((2, 3, 4)), np.float32(9.0), tf.float32)


#######################################################################
# Crop to bounding box
# --------------------
def _test_crop(in_shape, off_h, off_w, tar_h, tar_w):
    """Crop to bounding box"""
    data = np.random.uniform(size=in_shape).astype("float32")
    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=data.shape, dtype=data.dtype)
        tf.image.crop_to_bounding_box(in_data, off_h, off_w, tar_h, tar_w)
        compare_tf_with_tvm(data, "Placeholder:0", "crop_to_bounding_box/Slice:0")


def test_forward_crop():
    """Crop to bounding box"""
    _test_crop((1, 224, 224, 3), 20, 20, 120, 120)


#######################################################################
# CropAndResize
# -------------
def _test_forward_crop_and_resize(
    img_shape,
    boxes,
    box_idx,
    crop_size,
    extrapolation_value=0.0,
    method="bilinear",
    dtype="float32",
):
    image = np.random.uniform(0, 10, size=img_shape).astype(dtype)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = array_ops.placeholder(dtype, image.shape, name="in_data")
        tf.image.crop_and_resize(
            in_data,
            boxes=boxes,
            box_ind=box_idx,
            crop_size=crop_size,
            method=method,
            extrapolation_value=extrapolation_value,
            name="crop_and_resize",
        )
        compare_tf_with_tvm([image], ["in_data:0"], "crop_and_resize:0")


def test_forward_crop_and_resize():
    """CropAndResize"""
    _test_forward_crop_and_resize([1, 6, 6, 3], [[0, 0, 1, 1]], [0], [3, 3])
    _test_forward_crop_and_resize([1, 6, 6, 3], [[0, 0, 1, 1]], [0], [3, 3], 0.2)
    _test_forward_crop_and_resize([1, 6, 6, 3], [[0, 0, 1, 1]], [0], [3, 3], 0.2, "nearest")
    _test_forward_crop_and_resize([1, 11, 11, 3], [[0.3, 0.3, 1, 1]], [0], [21, 21])
    _test_forward_crop_and_resize([1, 41, 41, 3], [[0.2, 0.4, 0.8, 0.8]], [0], [21, 11])
    _test_forward_crop_and_resize([1, 100, 100, 3], [[0, 0, 0.9, 0.9]], [0], [30, 30])
    _test_forward_crop_and_resize([1, 224, 224, 3], [[0.1, 0.2, 1, 1]], [0], [9, 9])
    _test_forward_crop_and_resize([1, 249, 249, 3], [[0, 0, 1, 1]], [0], [9, 9])
    _test_forward_crop_and_resize([1, 201, 301, 3], [[0.2, 0.3, 0.7, 0.8]], [0], [51, 51])
    _test_forward_crop_and_resize(
        img_shape=[10, 11, 11, 3],
        boxes=[[0, 0, 0.9, 0.9], [0.2, 0.2, 0.8, 0.8]],
        box_idx=[0, 1],
        crop_size=[5, 5],
    )
    _test_forward_crop_and_resize(
        img_shape=[20, 576, 576, 3],
        boxes=[[0, 0, 1, 1], [0, 0, 0.8, 0.8], [0.1, 0.2, 0.9, 1], [0.2, 0, 1, 1]],
        box_idx=[1, 0, 2, 3],
        crop_size=[24, 24],
        extrapolation_value=0.3,
    )
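    # boxes are normalized [y1, x1, y2, x2] coordinates in [0, 1]; box_idx
    # maps each box to an image in the batch, as in the multi-box cases
    # above and below.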
    _test_forward_crop_and_resize(
        img_shape=[20, 229, 229, 3],
        boxes=[[0, 0, 0.9, 0.9], [0.3, 0.3, 1, 1], [0.2, 0.1, 0.7, 0.8], [0, 0, 1, 1]],
        box_idx=[3, 0, 2, 1],
        crop_size=[58, 58],
        extrapolation_value=0.2,
        method="nearest",
    )


#######################################################################
# Non Max Suppression
# -------------------
def _test_forward_nms_v3(
    bx_shape, score_shape, iou_threshold, score_threshold, out_size, dtype="float32"
):
    boxes = np.random.uniform(0, 10, size=bx_shape).astype(dtype)
    scores = np.random.uniform(size=score_shape).astype(dtype)
    max_output_size = np.int32(out_size)
    tf.reset_default_graph()
    in_data_1 = tf.placeholder(dtype, boxes.shape, name="in_data_1")
    in_data_2 = tf.placeholder(dtype, scores.shape, name="in_data_2")
    in_data_3 = tf.placeholder(tf.int32, name="in_data_3")
    tf.image.non_max_suppression(
        boxes=in_data_1,
        scores=in_data_2,
        max_output_size=in_data_3,
        iou_threshold=iou_threshold,
        score_threshold=score_threshold,
        name="nms",
    )
    compare_tf_with_tvm(
        [boxes, scores, max_output_size],
        ["in_data_1:0", "in_data_2:0", "in_data_3:0"],
        "nms/NonMaxSuppressionV3:0",
        mode="vm",
    )
    compare_tf_with_tvm(
        [boxes, scores, max_output_size],
        ["in_data_1:0", "in_data_2:0", "in_data_3:0"],
        "nms/NonMaxSuppressionV3:0",
        mode="debug",
    )


def _test_forward_nms_v4(
    bx_shape, score_shape, iou_threshold, score_threshold, out_size, dtype="float32"
):
    boxes = np.random.uniform(0, 10, size=bx_shape).astype(dtype)
    scores = np.random.uniform(size=score_shape).astype(dtype)
    max_output_size = np.int32(out_size)
    tf.reset_default_graph()
    in_data_1 = tf.placeholder(dtype, boxes.shape, name="in_data_1")
    in_data_2 = tf.placeholder(dtype, scores.shape, name="in_data_2")
    in_data_3 = tf.placeholder(tf.int32, name="in_data_3")
    indices_padded, num_valid = tf.image.non_max_suppression_padded(
        boxes=in_data_1,
        scores=in_data_2,
        max_output_size=in_data_3,
        iou_threshold=iou_threshold,
        score_threshold=score_threshold,
        name="nms",
        pad_to_max_output_size=True,
    )
    num_valid = tf.reshape(num_valid, shape=(-1,))
    indices_padded = tf.reshape(indices_padded, shape=(-1,))
    tf.slice(indices_padded, tf.constant([0]), num_valid, name="SlicedIndices")
    compare_tf_with_tvm(
        [boxes, scores, max_output_size],
        ["in_data_1:0", "in_data_2:0", "in_data_3:0"],
        ["nms/NonMaxSuppressionV4:1", "SlicedIndices:0"],
        mode="vm",
    )
    compare_tf_with_tvm(
        [boxes, scores, max_output_size],
        ["in_data_1:0", "in_data_2:0", "in_data_3:0"],
        ["nms/NonMaxSuppressionV4:1", "SlicedIndices:0"],
        mode="debug",
    )


def _test_forward_nms_v5(
    bx_shape, score_shape, iou_threshold, score_threshold, out_size, dtype="float32"
):
    boxes = np.random.uniform(0, 10, size=bx_shape).astype(dtype)
    scores = np.random.uniform(size=score_shape).astype(dtype)
    max_output_size = np.int32(out_size)
    tf.reset_default_graph()
    in_data_1 = tf.placeholder(dtype, boxes.shape, name="in_data_1")
    in_data_2 = tf.placeholder(dtype, scores.shape, name="in_data_2")
    in_data_3 = tf.placeholder(tf.int32, name="in_data_3")
    tf.image.non_max_suppression_with_scores(
        boxes=in_data_1,
        scores=in_data_2,
        max_output_size=in_data_3,
        iou_threshold=iou_threshold,
        score_threshold=score_threshold,
        name="nms",
    )
    compare_tf_with_tvm(
        [boxes, scores, max_output_size],
        ["in_data_1:0", "in_data_2:0", "in_data_3:0"],
        ["nms/NonMaxSuppressionV5:0", "nms/NonMaxSuppressionV5:1"],
        mode="vm",
    )


def test_forward_nms():
    """NonMaxSuppressionV3, V5"""
    for _test_forward_nms in [_test_forward_nms_v3, _test_forward_nms_v5]:
        _test_forward_nms((5, 4), (5,), 0.7, 0.5, 5)
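        # the remaining cases reuse similar thresholds while scaling the
        # box count from 20 up to 2000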
        _test_forward_nms((20, 4), (20,), 0.5, 0.6, 10)
        _test_forward_nms((1000, 4), (1000,), 0.3, 0.7, 1000)
        _test_forward_nms((2000, 4), (2000,), 0.4, 0.6, 7)


def _test_forward_combined_nms(
    bx_shape,
    score_shape,
    iou_threshold,
    score_threshold,
    out_size,
    total_size,
    clip_boxes=False,
    dtype="float32",
):
    boxes = np.random.uniform(-1, 2, size=bx_shape).astype(dtype)
    scores = np.random.uniform(size=score_shape).astype(dtype)
    max_output_size = np.int32(out_size)
    tf.reset_default_graph()
    in_data_1 = tf.placeholder(dtype, boxes.shape, name="in_data_1")
    in_data_2 = tf.placeholder(dtype, scores.shape, name="in_data_2")
    in_data_3 = tf.placeholder(tf.int32, name="in_data_3")
    tf.image.combined_non_max_suppression(
        boxes=in_data_1,
        scores=in_data_2,
        max_output_size_per_class=in_data_3,
        max_total_size=total_size,
        iou_threshold=iou_threshold,
        score_threshold=score_threshold,
        pad_per_class=False,
        clip_boxes=clip_boxes,
        name="nms",
    )
    compare_tf_with_tvm(
        [boxes, scores, max_output_size],
        ["in_data_1:0", "in_data_2:0", "in_data_3:0"],
        [
            "nms/CombinedNonMaxSuppression:0",
            "nms/CombinedNonMaxSuppression:1",
            "nms/CombinedNonMaxSuppression:2",
            "nms/CombinedNonMaxSuppression:3",
        ],
    )


def test_forward_combined_nms():
    """CombinedNonMaxSuppression"""
    _test_forward_combined_nms((1, 64, 1, 4), (1, 64, 1), 0.7, 0.5, 64, 64)
    _test_forward_combined_nms((1, 32, 1, 4), (1, 32, 1), 0.7, 0.5, 10, 64)
    _test_forward_combined_nms((1, 32, 1, 4), (1, 32, 2), 0.7, 0.5, 32, 64)
    _test_forward_combined_nms((1, 64, 1, 4), (1, 64, 20), 0.7, 0.5, 64, 10)
    _test_forward_combined_nms((1, 64, 20, 4), (1, 64, 20), 0.7, 0.5, 64, 64, clip_boxes=True)
    _test_forward_combined_nms((2, 200, 1, 4), (2, 200, 1), 0.4, 0.6, 100, 100)
    _test_forward_combined_nms((2, 200, 1, 4), (2, 200, 10), 0.4, 0.2, 150, 1000)


#######################################################################
# LSTM
# ----
def _test_lstm_cell(batch_size, num_hidden, num_layers, forget_bias, dtype):
    """One iteration of a LSTM cell"""

    tf.reset_default_graph()
    input_size = num_hidden
    input_data = np.full((batch_size, input_size), 1.0, dtype=dtype)
    in_state_c = np.full((batch_size, num_hidden), 0.1, dtype=dtype)
    in_state_h = np.full((batch_size, num_hidden), 0.1, dtype=dtype)

    def _get_tensorflow_output():
        with tf.Session() as sess:
            with variable_scope.variable_scope(
                "root", initializer=init_ops.constant_initializer(0.5)
            ):
                m0 = tf.placeholder(dtype, [batch_size, num_hidden], name="m0")
                m1 = tf.placeholder(dtype, [batch_size, num_hidden], name="m1")
                x = tf.placeholder(shape=(batch_size, input_size), dtype=dtype, name="input")
                g, (out_m0, out_m1) = tensorflow.contrib.rnn.LSTMBlockCell(
                    num_hidden, forget_bias=forget_bias
                )(x, (m0, m1))
                sess.run([variables.global_variables_initializer()])
                res = sess.run(
                    [g, out_m0, out_m1],
                    {
                        x.name: np.array([[1.0, 1.0]]),
                        m0.name: in_state_c,
                        m1.name: in_state_h,
                    },
                )
            graph_def = sess.graph.as_graph_def(add_shapes=True)
            final_graph_def = graph_util.convert_variables_to_constants(
                sess, graph_def, ["root/lstm_cell/LSTMBlockCell"]
            )
            return final_graph_def, res

    graph_def, tf_out = _get_tensorflow_output()
    tvm_output = run_tvm_graph(
        graph_def,
        [input_data, in_state_c, in_state_h],
        ["root/input", "root/m0", "root/m1"],
        num_output=7,
    )
    assert isinstance(tvm_output, list)

    tvm.testing.assert_allclose(tf_out[0], tvm_output[6], rtol=1e-3, atol=1e-3)
    tvm.testing.assert_allclose(tf_out[1], tvm_output[1], rtol=1e-3, atol=1e-3)


def test_forward_lstm():
    """test LSTM block cell"""
    if package_version.parse(tf.VERSION) < package_version.parse("2.0.0"):
        # in 2.0, tf.contrib.rnn.LSTMBlockCell is removed
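        # note: _test_lstm_cell feeds a hard-coded (1, 2) input tensor, so
        # only batch_size=1 with num_hidden=2 is valid here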
        _test_lstm_cell(1, 2, 1, 0.5, "float32")


#######################################################################
# Pack
# ---
def _test_pack(axis, shape, **kwargs):
    a = np.arange(np.prod(shape), dtype=np.float32).reshape(shape)
    b = np.arange(np.prod(shape), dtype=np.float32).reshape(shape)

    with tf.Graph().as_default():
        tf_a = array_ops.placeholder(shape=shape, dtype="float32", name="pl_a")
        tf_b = array_ops.placeholder(shape=shape, dtype="float32", name="pl_b")
        tf_c = tf.stack([tf_a, tf_b], axis=axis, **kwargs)
        assert tf_c.op.op_def.name == "Pack", "tf.stack() is expected to produce 'Pack' operation"

        compare_tf_with_tvm([a, b], ["pl_a:0", "pl_b:0"], "stack:0")


def test_forward_pack():
    for axis in range(-3, 3):
        _test_pack(axis, [3, 2, 1])
    for axis in range(-1, 1):
        _test_pack(axis, [3])
    _test_pack(0, [])


#######################################################################
# Unpack
# ------
def _test_forward_unpack(in_shape, axis, dtype):
    """test operator Unpack"""
    np_data = np.random.uniform(-100, 100, size=in_shape).astype(dtype)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(dtype, in_shape, name="in_data")
        tf.unstack(in_data, axis=axis, name="Unpack")
        compare_tf_with_tvm([np_data], ["in_data:0"], "Unpack:0")


def test_forward_unpack():
    _test_forward_unpack((3,), 0, "int32")
    _test_forward_unpack((3,), -1, "int16")
    _test_forward_unpack((21, 23, 3), 2, "float32")


#######################################################################
# Range
# -----
def test_forward_range():
    """test operator Range"""
    for dtype in [tf.int32, tf.int64]:
        tf.reset_default_graph()
        with tf.Graph().as_default():
            tf.range(1, 18, 3, name="range", dtype=dtype)
            compare_tf_with_tvm([], [], "range:0")

    # test type assignment for operator Range
    tf.reset_default_graph()
    with tf.Graph().as_default():
        tf.range(1, 256 + 1, 1, dtype=tf.float32)
        compare_tf_with_tvm([], [], "range:0")


#######################################################################
# Pad
# ---
def _test_pad(input_shape, paddings, mode, **kwargs):
    """One iteration of pad operation with given shape"""

    x = np.arange(np.prod(input_shape), dtype=np.float32).reshape(input_shape)

    with tf.Graph().as_default():
        in_data = array_ops.placeholder(shape=input_shape, dtype="float32")
        pad_values = constant_op.constant(paddings)
        pad = tf.pad(in_data, paddings=pad_values, mode=mode, **kwargs)

        if mode == "CONSTANT":
            if "constant_values" in kwargs:
                out_name = "PadV2:0"
            else:
                out_name = "Pad:0"
        else:
            out_name = "MirrorPad:0"

        compare_tf_with_tvm(x, "Placeholder:0", out_name)


def test_forward_pad():
    """Pad"""
    _test_pad((2, 3), [[1, 1], [2, 2]], mode="CONSTANT")
    _test_pad((2, 3), [[1, 1], [2, 2]], mode="CONSTANT", constant_values=1.0)
    _test_pad((2, 3), [[1, 1], [2, 2]], mode="SYMMETRIC")
    _test_pad((2, 3), [[1, 1], [2, 2]], mode="REFLECT")


#######################################################################
# Logical operators
# --------------------
def test_logical_and():
    with tf.Graph().as_default():
        in1 = tf.placeholder(tf.bool, shape=[1, 4, 4, 3], name="in1")
        in2 = tf.placeholder(tf.bool, shape=[1, 4, 4, 3], name="in2")
        out = tf.logical_and(in1, in2, name="out")
        in_data1 = np.random.choice(a=[False, True], size=(1, 4, 4, 3)).astype("bool")
        in_data2 = np.random.choice(a=[False, True], size=(1, 4, 4, 3)).astype("bool")
        compare_tf_with_tvm([in_data1, in_data2], ["in1:0", "in2:0"], "out:0")


def test_logical_or():
    with tf.Graph().as_default():
        in1 = tf.placeholder(tf.bool, shape=[1, 4, 4, 3], name="in1")
name="in1") in2 = tf.placeholder(tf.bool, shape=[1, 4, 4, 3], name="in2") out = tf.logical_or(in1, in2, name="out") in_data1 = np.random.choice(a=[False, True], size=(1, 4, 4, 3)).astype("bool") in_data2 = np.random.choice(a=[False, True], size=(1, 4, 4, 3)).astype("bool") compare_tf_with_tvm([in_data1, in_data2], ["in1:0", "in2:0"], "out:0") def test_logical_xor(): with tf.Graph().as_default(): in1 = tf.placeholder(tf.bool, shape=[1, 4, 4, 3], name="in1") in2 = tf.placeholder(tf.bool, shape=[1, 4, 4, 3], name="in2") out = tf.logical_xor(in1, in2, name="out") in_data1 = np.random.choice(a=[False, True], size=(1, 4, 4, 3)).astype("bool") in_data2 = np.random.choice(a=[False, True], size=(1, 4, 4, 3)).astype("bool") compare_tf_with_tvm([in_data1, in_data2], ["in1:0", "in2:0"], "out:0") def test_logical_not(): with tf.Graph().as_default(): in1 = tf.placeholder(tf.bool, shape=[1, 4, 4, 3], name="in1") out = tf.logical_not(in1, name="out") in_data1 = np.random.choice(a=[False, True], size=(1, 4, 4, 3)).astype("bool") compare_tf_with_tvm(in_data1, "in1:0", "out:0") def test_forward_logical(): test_logical_and() test_logical_or() test_logical_xor() test_logical_not() ####################################################################### # Where, Select, SelectV2 # ------------- def test_forward_where(): """Where: return elements depending on conditions""" with tf.Graph().as_default(): with tf.Session() as sess: input1 = tf.placeholder(tf.int32, shape=[1, 4, 4, 3], name="input1") input2 = tf.placeholder(tf.int32, shape=[1, 4, 4, 3], name="input2") mask = input1 > input2 tf.where(mask, input1 + 1, input2 * 2) in_data1 = np.random.uniform(0, 10, size=(1, 4, 4, 3)).astype("uint32") in_data2 = np.random.uniform(0, 10, size=(1, 4, 4, 3)).astype("uint32") compare_tf_with_tvm([in_data1, in_data2], ["input1:0", "input2:0"], "Select:0") ####################################################################### # Inception V3 # ------------ def test_forward_inception_v3(): """test inception V3 model""" with tf.Graph().as_default(): graph_def = tf_testing.get_workload( "InceptionV3/inception_v3_2016_08_28_frozen-with_shapes.pb" ) # Call the utility to import the graph definition into default graph. graph_def = tf_testing.ProcessGraphDefParam(graph_def) data = np.random.uniform(size=(1, 299, 299, 3)).astype("float32") with tf.Session() as sess: tf_output = run_tf_graph(sess, data, "input:0", "InceptionV3/Predictions/Reshape_1:0") tvm_output = run_tvm_graph(graph_def, data, "input") tvm.testing.assert_allclose(tf_output[0], tvm_output[0], rtol=1e-5, atol=1e-5) ####################################################################### # Inception V1 # ------------ def test_forward_inception_v1(): """test inception V1 model""" with tf.Graph().as_default(): graph_def = tf_testing.get_workload("InceptionV1/classify_image_graph_def-with_shapes.pb") # Call the utility to import the graph definition into default graph. graph_def = tf_testing.ProcessGraphDefParam(graph_def) # Build an image from random data. 
        # Build an image from random data.
        from PIL import Image
        from tvm.contrib import utils

        img_array = np.random.uniform(size=(1, 600, 600, 3)).astype("uint8")
        img = Image.frombuffer("RGB", (600, 600), img_array.tostring(), "raw", "RGB", 0, 1)
        temp = utils.tempdir()
        img_path = temp.relpath("tf-test.jpg")
        img.save(img_path)

        import os.path

        if not tf.gfile.Exists(os.path.join(img_path)):
            tf.logging.fatal("File does not exist %s", img_path)
        data = tf.gfile.FastGFile(os.path.join(img_path), "rb").read()

        temp.remove()

        # Extract tensorflow decoded image frame for tvm input
        with tf.Session() as sess:
            tvm_data = run_tf_graph(sess, data, "DecodeJpeg/contents:0", "DecodeJpeg:0")

        with tf.Session() as sess:
            tf_output = run_tf_graph(sess, data, "DecodeJpeg/contents:0", "softmax:0")
            tvm_output = run_tvm_graph(graph_def, tvm_data, "DecodeJpeg/contents")
            tvm.testing.assert_allclose(tf_output[0], tvm_output[0], rtol=1e-5, atol=1e-5)


#######################################################################
# Mobilenet
# ---------
def test_forward_mobilenet():
    """test mobilenet model"""
    # MobilenetV2
    with tf.Graph().as_default():
        graph_def = tf_testing.get_workload(
            "https://storage.googleapis.com/mobilenet_v2/checkpoints/mobilenet_v2_1.4_224.tgz",
            "mobilenet_v2_1.4_224_frozen.pb",
        )
        # Call the utility to import the graph definition into default graph.
        graph_def = tf_testing.ProcessGraphDefParam(graph_def)

        data = np.random.uniform(size=(1, 224, 224, 3)).astype("float32")
        out_node = "MobilenetV2/Predictions/Reshape_1"

        with tf.Session() as sess:
            # Add shapes to the graph.
            graph_def = tf_testing.AddShapesToGraphDef(sess, out_node)
            tf_output = run_tf_graph(sess, data, "input:0", out_node + ":0")
            tvm_output = run_tvm_graph(graph_def, data, "input")
            tvm.testing.assert_allclose(
                np.squeeze(tvm_output[0]), np.squeeze(tf_output[0]), rtol=1e-5, atol=1e-5
            )


#######################################################################
# ResnetV2
# --------
@tvm.testing.requires_gpu
def test_forward_resnetv2():
    """test resnet model"""
    if is_gpu_available():
        with tf.Graph().as_default():
            graph_def = tf_testing.get_workload(
                "ResnetV2/resnet-20180601_resnet_v2_imagenet-shapes.pb"
            )
            # Call the utility to import the graph definition into default graph.
            graph_def = tf_testing.ProcessGraphDefParam(graph_def)

            data = np.random.uniform(size=(128, 224, 224, 3)).astype("float32")
            out_node = "ArgMax"

            with tf.Session() as sess:
                tf_output = run_tf_graph(sess, data, "input_tensor:0", out_node + ":0")
                for device in ["llvm", "cuda"]:
                    dev = tvm.device(device, 0)
                    if not tvm.testing.device_enabled(device):
                        print("Skip because %s is not enabled" % device)
                        continue
                    tvm_output = run_tvm_graph(
                        graph_def, data, "input_tensor", len(tf_output), target=device
                    )
                    tvm.testing.assert_allclose(
                        np.squeeze(tvm_output[0]), np.squeeze(tf_output[0]), rtol=1e-5, atol=1e-5
                    )


#######################################################################
# SSD
# ---
def _test_ssd_impl():
    """Test SSD with backbone MobileNet V1"""
    with tf.Graph().as_default():
        graph_def = tf_testing.get_workload(
            "object_detection/ssd_mobilenet_v1_ppn_shared_"
            "box_predictor_300x300_coco14_sync_2018_07_03.pb"
        )
        # Call the utility to import the graph definition into default graph.
        graph_def = tf_testing.ProcessGraphDefParam(graph_def)

        data = np.random.uniform(0.0, 255.0, size=(1, 512, 512, 3)).astype("uint8")
        in_node = "image_tensor"
        out_node = ["detection_boxes", "detection_scores", "detection_classes"]

        with tf.Session() as sess:
            tf_output = run_tf_graph(
                sess, data, "{}:0".format(in_node), ["{}:0".format(oname) for oname in out_node]
            )
            # TODO(kevinthesun): enable gpu test when VM heterogeneous execution is ready.
            for device in ["llvm"]:
                dev = tvm.device(device, 0)
                if not tvm.testing.device_enabled(device):
                    print("Skip because %s is not enabled" % device)
                    continue
                tvm_output = run_tvm_graph(
                    graph_def,
                    data,
                    in_node,
                    len(out_node),
                    target=device,
                    layout="NCHW",
                    out_names=out_node,
                    mode="vm",
                    disabled_pass=["FoldScaleAxis"],
                    serialize=True,
                )
                for i in range(len(out_node)):
                    tvm.testing.assert_allclose(tvm_output[i], tf_output[i], rtol=1e-3, atol=1e-3)


@pytest.mark.skip("neo-ai/tvm: skip because stack limit of 100mb is exceeded by WellFormedChecker")
def test_forward_ssd():
    run_thread = threading.Thread(target=_test_ssd_impl, args=())
    old_stack_size = threading.stack_size(100 * 1024 * 1024)
    run_thread.start()
    run_thread.join()
    threading.stack_size(old_stack_size)


#######################################################################
# Placeholder
# -----------
def test_forward_placeholder():
    """test a simple pb with Placeholder node in the end of GraphDef"""
    with tf.Graph().as_default():
        graph_def = tf_testing.get_workload("Custom/placeholder.pb")
        # Call the utility to import the graph definition into default graph.
        graph_def = tf_testing.ProcessGraphDefParam(graph_def)

        data = np.random.uniform(size=(1, 224, 224, 3)).astype("float32")
        out_node = "mul"

        with tf.Session() as sess:
            # Add shapes to the graph.
            graph_def = tf_testing.AddShapesToGraphDef(sess, out_node)
            tf_output = run_tf_graph(sess, data, "Placeholder:0", out_node + ":0")
            tvm_output = run_tvm_graph(graph_def, data, "Placeholder")
            tvm.testing.assert_allclose(
                np.squeeze(tvm_output[0]), np.squeeze(tf_output[0]), rtol=1e-5, atol=1e-5
            )


#######################################################################
# PTB
# ---
try:
    # Load contrib for running ptb model in tf version before 2.0
    import tensorflow.contrib
except ImportError:
    pass


def test_forward_ptb():
    """test ptb model"""
    config = tf_testing.get_config()
    num_steps = config.num_steps
    num_hidden = config.hidden_size
    num_layers = config.num_layers
    batch_size = config.batch_size
    vocab_size = config.vocab_size
    out_sample_shape = (batch_size, vocab_size)
    out_state_shape = (batch_size, num_hidden)
    # Sample input
    inpt = "we have no useful information on"
    cnt_sample = 20

    def _pretty_print(items, is_char_model, id2word):
        if not is_char_model:
            return " ".join([id2word[x] for x in items])
        else:
            return "".join([id2word[x] for x in items]).replace("_", " ")

    def _get_tvm_graph_module(graph_def):
        # Cell inputs 'c and 'h' consist of all layers values
        shape_dict = {"Model/Placeholder": (batch_size, num_steps)}

        mod, params = relay.frontend.from_tensorflow(
            graph_def,
            shape=shape_dict,
            outputs=[
                "Model/Softmax:0",
                "Model/RNN/RNN/multi_rnn_cell/cell_0/lstm_cell/LSTMBlockCell:1",
                "Model/RNN/RNN/multi_rnn_cell/cell_0/lstm_cell/LSTMBlockCell:6",
                "Model/RNN/RNN/multi_rnn_cell/cell_0/lstm_cell/LSTMBlockCell_1:1",
                "Model/RNN/RNN/multi_rnn_cell/cell_0/lstm_cell/LSTMBlockCell_1:6",
            ],
        )

        target = "llvm"
        with tvm.transform.PassContext(opt_level=0):
            graph, lib, params = relay.build(mod, target, params=params)
        from tvm.contrib import graph_executor

        dev = tvm.cpu(0)
        return params, graph_executor.create(graph, lib, dev)
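
    # Sampling below feeds one word id at a time and threads the four LSTM
    # state tensors (c and h for each of the two layers) back in between
    # steps, then continues autoregressively from the last sampled word.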
    def _do_tvm_sample(model, data, in_states, params, num_samples):
        """Sampled from the model"""
        samples = []
        state = in_states
        sample = None

        def _get_sample(data, state):
            input_data = np.full((batch_size, num_steps), data, dtype="int32")

            model.set_input("Model/Placeholder", tvm.nd.array(input_data.astype("int32")))
            model.set_input(
                "Model/MultiRNNCellZeroState/LSTMBlockCellZeroState/zeros",
                tvm.nd.array(state[0].astype("float32")),
            )
            model.set_input(
                "Model/MultiRNNCellZeroState/LSTMBlockCellZeroState/zeros_1",
                tvm.nd.array(state[1].astype("float32")),
            )
            model.set_input(
                "Model/MultiRNNCellZeroState/LSTMBlockCellZeroState_1/zeros",
                tvm.nd.array(state[2].astype("float32")),
            )
            model.set_input(
                "Model/MultiRNNCellZeroState/LSTMBlockCellZeroState_1/zeros_1",
                tvm.nd.array(state[3].astype("float32")),
            )
            model.set_input(**params)
            model.run()
            tvm_output = model.get_output(0, tvm.nd.empty(out_sample_shape, "float32")).numpy()

            state_output = []
            for i in range(4):
                state_output.append(
                    model.get_output(i + 1, tvm.nd.empty(out_state_shape, "float32")).numpy()
                )
            sample = tf_testing.pick_from_weight(tvm_output[0])

            return sample, state_output

        for x in data:
            sample, state = _get_sample(x, state)

        if sample is not None:
            samples.append(sample)
        else:
            samples.append(0)

        k = 1
        while k < num_samples:
            sample, state = _get_sample(samples[-1], state)
            samples.append(sample)
            k += 1

        return samples, state

    with tf.Graph().as_default():
        word_to_id, id_to_word, graph_def = tf_testing.get_workload_ptb()
        vocab_size = len(word_to_id)
        # Call the utility to import the graph definition into default graph.
        graph_def = tf_testing.ProcessGraphDefParam(graph_def)
        sess = tf.Session()

    # TVM graph module creation
    params, m = _get_tvm_graph_module(graph_def)

    # Create 10 predicted statements of 20 words
    cnt_stm = 0
    while cnt_stm < 10:
        cnt_stm += 1
        in_state = [np.full((batch_size, num_hidden), 0, dtype="float32")] * 2 * num_layers
        seed_for_sample = inpt.split()
        tvm_samples, tvm_state = _do_tvm_sample(
            m, [word_to_id[word] for word in seed_for_sample], in_state, params, cnt_sample
        )
        tvm_sample_str = _pretty_print(tvm_samples, False, id_to_word)
        tf_samples, tf_state = tf_testing.do_tf_sample(
            sess, [word_to_id[word] for word in seed_for_sample], in_state, cnt_sample
        )
        tf_sample_str = _pretty_print(tf_samples, False, id_to_word)
        inpt = tvm_sample_str
        tvm.testing.assert_allclose(tf_samples, tvm_samples, rtol=1e-5, atol=1e-5)
        assert tvm_sample_str == tf_sample_str


#######################################################################
# LRN (Local Response Normalization)
# ----------------------------------
def _test_lrn(ishape, size, axis, bias, alpha, beta):
    """testing local response normalization"""
    # depth_radius is an integer attribute; use floor division explicitly.
    lrn_depth_radius = size // 2

    inp_array = np.random.uniform(size=ishape).astype(np.float32)

    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype, name="lrn0_data")
        nn_ops.local_response_normalization(
            in1, name="lrn", depth_radius=lrn_depth_radius, bias=bias, alpha=alpha, beta=beta
        )

        compare_tf_with_tvm(inp_array, "lrn0_data:0", "lrn:0")


def test_forward_lrn():
    _test_lrn((1, 3, 20, 20), 3, 1, 1.0, 1.0, 0.5)


#######################################################################
# l2_normalize
# ------------
def _test_l2_normalize(ishape, eps, axis):
    """testing l2 normalize (uses max, sum, square, sqrt frontend operators)"""

    inp_array = np.random.uniform(size=ishape).astype(np.float32)

    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
        nn.l2_normalize(in1, axis=axis, epsilon=eps, name=None, dim=None)

        compare_tf_with_tvm(inp_array, "Placeholder:0", "l2_normalize:0")


def test_forward_l2_normalize():
    _test_l2_normalize((1, 3, 20, 20), 0.001, (0,))


#######################################################################
# transpose
# ---------
def _test_forward_transpose(ishape, axes=None):
    data = np.random.uniform(size=ishape).astype(np.float32)

    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=data.shape, dtype=data.dtype, name="transpose_data")

        if axes is None:
            tf.transpose(in1)
        else:
            tf.transpose(in1, perm=axes)

        compare_tf_with_tvm(data, "transpose_data:0", "transpose:0")


def _test_forward_transpose_axes_input(ishape, axes):
    data = np.random.uniform(size=ishape).astype(np.float32)
    axes_np = np.array(axes).astype(np.int32)

    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=data.shape, dtype=data.dtype, name="transpose_data")

        const1 = tf.constant(axes_np, dtype=tf.int32)

        # make axes an input to tf.transpose, but not an input to the graph,
        # so it can be extracted with infer_value_simulated
        axes = tf.reverse(const1, axis=[-1])
        tf.transpose(in1, axes)

        compare_tf_with_tvm([data], ["transpose_data:0"], "transpose:0")


def test_forward_transpose():
    _test_forward_transpose((2, 3, 4), (1, 2, 0))
    _test_forward_transpose((2, 3, 4))
    _test_forward_transpose((7, 8, 8, 10))
    _test_forward_transpose((2, 3, 4), (0, 1, 2))
    _test_forward_transpose((2, 3, 4, 5), (3, 0, 1, 2))
    _test_forward_transpose_axes_input((2, 3, 4), (1, 2, 0))
    _test_forward_transpose_axes_input((2, 3, 4, 5), (3, 0, 1, 2))


def _test_forward_slice_operation_input(input_value, begin_value, size_value):
    input_data = np.array(input_value, dtype=np.float32)

    with tf.Graph().as_default():
        input_tensor = tf.placeholder(shape=input_data.shape, dtype=input_data.dtype, name="input")
        tf.slice(input_tensor, begin_value, size_value, name="slice_output")
        compare_tf_with_tvm([input_data], ["input:0"], "slice_output:0")


def test_forward_slice():
    _test_forward_slice_operation_input([1, 1], [0], [2])
    _test_forward_slice_operation_input([0, 1, 2, 3], [3], [-1])
    _test_forward_slice_operation_input(
        [[0, 1, 2, 3], [4, 5, 6, 7]], begin_value=[0, 1], size_value=[-1, -1]
    )


def test_forward_ceil():
    ishape = (1, 3, 10, 10)
    inp_array = np.random.uniform(size=ishape).astype(np.float32)
    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
        tf.ceil(in1)
        compare_tf_with_tvm(inp_array, "Placeholder:0", "Ceil:0")


def test_forward_floor():
    ishape = (1, 3, 10, 10)
    inp_array = np.random.uniform(size=ishape).astype(np.float32)
    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
        tf.floor(in1)
        compare_tf_with_tvm(inp_array, "Placeholder:0", "Floor:0")


def test_forward_relu():
    ishape = (1, 3, 10, 10)
    inp_array = np.random.uniform(-5, 5, size=ishape).astype(np.float32)
    for mode in ["graph_executor", "vm"]:
        with tf.Graph().as_default():
            in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
            tf.nn.relu(in1)
            compare_tf_with_tvm(inp_array, "Placeholder:0", "Relu:0", mode=mode)


def test_forward_leaky_relu():
    ishape = (1, 3, 10, 10)
    inp_array = np.random.uniform(-5, 5, size=ishape).astype(np.float32)
    for mode in ["graph_executor", "vm"]:
        with tf.Graph().as_default():
            in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
            tf.nn.leaky_relu(in1, alpha=0.4)
            compare_tf_with_tvm(inp_array, "Placeholder:0", "LeakyRelu:0", mode=mode)


def test_forward_elu():
    ishape = (1, 3, 10, 10)
    inp_array = np.random.uniform(-5, 5, size=ishape).astype(np.float32)
    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
        tf.nn.elu(in1)
        compare_tf_with_tvm(inp_array, "Placeholder:0", "Elu:0")


def test_forward_selu():
    ishape = (1, 3, 10, 10)
    inp_array = np.random.uniform(-5, 5, size=ishape).astype(np.float32)
    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
        tf.nn.selu(in1)
        compare_tf_with_tvm(inp_array, "Placeholder:0", "Selu:0")


def test_forward_tanh():
    ishape = (1, 3, 10, 10)
    inp_array = np.random.uniform(-5, 5, size=ishape).astype(np.float32)
    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype)
        tf.nn.tanh(in1)
        compare_tf_with_tvm(inp_array, "Placeholder:0", "Tanh:0")


#######################################################################
# Softmax
# -------
def test_forward_softmax():
    """test operator Softmax"""

    def check_softmax(in_shape, axis, dtype):
        np_data = np.random.uniform(-100, 100, size=in_shape).astype(dtype)
        tf.reset_default_graph()
        with tf.Graph().as_default():
            in_data = tf.placeholder(dtype, in_shape, name="in_data")
            tf.nn.softmax(in_data, axis=axis, name="Softmax")
            compare_tf_with_tvm([np_data], ["in_data:0"], "Softmax:0")

    check_softmax((2, 3, 5), 2, "float32")
    check_softmax((2, 3, 5), -1, "float32")


#######################################################################
# Tensor
# ------
def test_forward_round():
    """test Round"""
    np_data = np.random.uniform(-10, 10, size=(5, 7)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (5, 7), name="in_data")
        tf.round(in_data, name="round")
        compare_tf_with_tvm([np_data], ["in_data:0"], "round:0")


def test_forward_abs():
    """test operator Abs"""
    np_data = np.random.uniform(1, 100, size=(9, 11)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (9, 11), name="in_data")
        tf.math.abs(in_data, name="abs")
        compare_tf_with_tvm([np_data], ["in_data:0"], "abs:0")


def _test_forward_zeros_like(in_shape, dtype):
    np_data = np.random.uniform(-10, 10, size=in_shape).astype(dtype)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(dtype, in_shape, name="in_data")
        tf.zeros_like(in_data, name="zeros_like")
        compare_tf_with_tvm([np_data], ["in_data:0"], "zeros_like:0")


def test_forward_zeros_like():
    if tf.__version__ < LooseVersion("1.2"):
        _test_forward_zeros_like((2, 3), "int32")
        _test_forward_zeros_like((2, 3, 5), "int8")
        _test_forward_zeros_like((2, 3, 5, 7), "uint16")
        _test_forward_zeros_like((2, 3, 11), "float32")
        _test_forward_zeros_like((2, 3, 11), "float64")


def test_forward_squared_difference():
    ishape = (1, 3, 10, 14)
    inp_array_a = np.random.uniform(-5, 5, size=ishape).astype(np.float32)
    inp_array_b = np.random.uniform(-5, 5, size=ishape).astype(np.float32)
    with tf.Graph().as_default():
        in1 = tf.placeholder(shape=inp_array_a.shape, dtype=inp_array_a.dtype, name="in1")
        in2 = tf.placeholder(shape=inp_array_b.shape, dtype=inp_array_b.dtype, name="in2")
        out = tf.math.squared_difference(in1, in2)
        compare_tf_with_tvm([inp_array_a, inp_array_b], [in1.name, in2.name], out.name)


def _test_forward_reverse_v2(in_shape, axis, dtype):
    np_data = np.random.uniform(-10, 10, size=in_shape).astype(dtype)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(dtype, in_shape, name="in_data")
        tf.reverse(in_data, axis=[axis], name="reverse")
        compare_tf_with_tvm([np_data], ["in_data:0"], "reverse:0")


def test_forward_reverse_v2():
    """test ReverseV2"""
    _test_forward_reverse_v2((2, 3), 0, "int32")
    _test_forward_reverse_v2((2, 3, 5), 2, "float32")
    _test_forward_reverse_v2((2, 3, 5, 7), 1, "float32")
    _test_forward_reverse_v2((2, 3, 5), -1, "float64")
    _test_forward_reverse_v2((2, 3, 5), -3, "float64")


def test_forward_sign():
    """test Sign"""
    np_data = np.random.uniform(-10, 10, size=(5, 7, 11)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (5, 7, 11), name="in_data")
        tf.sign(in_data, name="sign")
        compare_tf_with_tvm([np_data], ["in_data:0"], "sign:0")


def test_forward_square():
    """test operator Square"""
    np_data = np.random.uniform(1, 100, size=(2, 3, 5)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (2, 3, 5), name="in_data")
        tf.square(in_data, name="square")
        compare_tf_with_tvm([np_data], ["in_data:0"], "square:0")


def test_forward_pow_exp():
    """test Pow and Exp"""
    np_in1 = np.random.uniform(-2, 2, size=(5, 7, 11)).astype(np.float32)
    np_in2 = np.random.uniform(-2, 2, size=(5, 7, 11)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in1 = tf.placeholder(tf.float32, (5, 7, 11), name="in1")
        in2 = tf.placeholder(tf.float32, (5, 7, 11), name="in2")
        out1 = tf.pow(in1, in2, name="pow")
        out = tf.exp(in1, name="exp")
        compare_tf_with_tvm([np_in1, np_in2], ["in1:0", "in2:0"], "pow:0")
        compare_tf_with_tvm([np_in1], ["in1:0"], "exp:0")


def test_forward_unary():
    def _test_forward_unary(op, a_min=1, a_max=5, dtype=np.float32):
        """test unary operators"""
        np_data = np.random.uniform(a_min, a_max, size=(2, 3, 5)).astype(dtype)
        tf.reset_default_graph()
        with tf.Graph().as_default():
            in_data = tf.placeholder(dtype, (2, 3, 5), name="in_data")
            out = op(in_data)
            compare_tf_with_tvm([np_data], ["in_data:0"], out.name)

    _test_forward_unary(tf.acos, -1, 1)
    _test_forward_unary(tf.asin, -1, 1)
    _test_forward_unary(tf.atanh, -1, 1)
    _test_forward_unary(tf.sinh)
    _test_forward_unary(tf.cosh)
    _test_forward_unary(tf.acosh)
    _test_forward_unary(tf.asinh)
    _test_forward_unary(tf.atan)
    _test_forward_unary(tf.sin)
    _test_forward_unary(tf.cos)
    _test_forward_unary(tf.tan)
    _test_forward_unary(tf.tanh)
    _test_forward_unary(tf.erf)
    _test_forward_unary(tf.log)
    _test_forward_unary(tf.log1p)


def test_forward_atan2():
    """test operator Atan2"""
    tf.disable_eager_execution()
    np_data_1 = np.random.uniform(1, 100, size=(2, 3, 5)).astype(np.float32)
    np_data_2 = np.random.uniform(1, 100, size=(2, 3, 5)).astype(np.float32)
    tf.reset_default_graph()
    in_data_1 = tf.placeholder(tf.float32, (2, 3, 5), name="in_data_1")
    in_data_2 = tf.placeholder(tf.float32, (2, 3, 5), name="in_data_2")
    tf.atan2(in_data_1, in_data_2, name="atan2")
    compare_tf_with_tvm([np_data_1, np_data_2], ["in_data_1:0", "in_data_2:0"], "atan2:0")


def test_forward_expm1():
    """test operator expm1"""

    def _test_forward_expm1(shape):
        tf.disable_eager_execution()
        np_data = np.random.uniform(1, 10, size=shape).astype(np.float32)
        tf.reset_default_graph()
        in_data = tf.placeholder(tf.float32, shape, name="in_data")
        tf.expm1(in_data, name="expm1")
        compare_tf_with_tvm([np_data], ["in_data:0"], "expm1:0")

    _test_forward_expm1([1, 100])
    _test_forward_expm1([1, 10, 10])
    _test_forward_expm1([2, 5, 2, 5])


def test_forward_softsign():
    """test operator softsign"""

    def _test_forward_softsign(shape):
        tf.disable_eager_execution()
        np_data = np.random.uniform(1, 100, size=shape).astype(np.float32)
        tf.reset_default_graph()
        in_data = tf.placeholder(tf.float32, shape, name="in_data")
        tf.nn.softsign(in_data, name="softsign")
        compare_tf_with_tvm([np_data], ["in_data:0"], "softsign:0")

    _test_forward_softsign([1, 100])
    _test_forward_softsign([1, 10, 10])
    _test_forward_softsign([2, 5, 2, 5])


def test_forward_rint():
    """test operator rint"""

    def _test_forward_rint(shape):
        tf.disable_eager_execution()
        np_data = np.random.uniform(-100, 100, size=shape).astype(np.float32)
        tf.reset_default_graph()
        in_data = tf.placeholder(tf.float32, shape, name="in_data")
        tf.math.rint(in_data, name="rint")
        compare_tf_with_tvm([np_data], ["in_data:0"], "rint:0")

    _test_forward_rint([100])
    _test_forward_rint([1, 100])
    _test_forward_rint([1, 10, 10])
    _test_forward_rint([2, 5, 2, 5])


def test_forward_negative():
    """test tf operator Neg"""
    np_data = np.random.uniform(-100, 255, size=(224, 224, 3)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (224, 224, 3), name="in_data")
        tf.negative(in_data, name="negative")
        compare_tf_with_tvm([np_data], ["in_data:0"], "negative:0")


def test_forward_log_softmax():
    """test operator LogSoftmax"""
    np_data = np.random.uniform(1, 100, size=(9, 11)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (9, 11), name="in_data")
        tf.math.log_softmax(in_data, name="LogSoftmax")
        compare_tf_with_tvm([np_data], ["in_data:0"], "LogSoftmax:0")


def test_forward_softplus():
    """test operator Softplus"""
    np_data = np.random.uniform(1, 10, size=(2, 3, 5)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (2, 3, 5), name="in_data")
        tf.nn.softplus(in_data, name="softplus")
        compare_tf_with_tvm([np_data], ["in_data:0"], "softplus:0")


def test_forward_rsqrt():
    """test Rsqrt"""
    np_data = np.random.uniform(1, 100, size=(5, 7, 11)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (5, 7, 11), name="in_data")
        tf.rsqrt(in_data, name="rsqrt")
        compare_tf_with_tvm([np_data], ["in_data:0"], "rsqrt:0")


def test_forward_sqrt():
    """test Sqrt"""
    np_data = np.random.uniform(1, 100, size=(5, 7, 11)).astype(np.float32)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        in_data = tf.placeholder(tf.float32, (5, 7, 11), name="in_data")
        tf.sqrt(in_data, name="sqrt")
        compare_tf_with_tvm([np_data], ["in_data:0"], "sqrt:0")


def _test_forward_right_shift(in_shape, dtype):
    """test operator RightShift"""
    lh_data = np.random.randint(1, 3, size=in_shape).astype(dtype)
    rh_data = np.random.randint(1, 8, size=in_shape).astype(dtype)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        lft_data = tf.placeholder(dtype, in_shape, name="lft_data")
        rgt_data = tf.placeholder(dtype, in_shape, name="rgt_data")
        tf.bitwise.right_shift(lft_data, rgt_data, name="RightShift")
        compare_tf_with_tvm([lh_data, rh_data], ["lft_data:0", "rgt_data:0"], "RightShift:0")


def test_forward_right_shift():
    _test_forward_right_shift((7,), "int32")
    _test_forward_right_shift((3, 11), "int16")


def _test_forward_left_shift(in_shape, dtype):
    """test operator LeftShift"""
    lh_data = np.random.randint(100, 1000000, size=in_shape).astype(dtype)
    rh_data = np.random.randint(1, 3, size=in_shape).astype(dtype)
    tf.reset_default_graph()
    with tf.Graph().as_default():
        lft_data = tf.placeholder(dtype, in_shape, name="lft_data")
        rgt_data = tf.placeholder(dtype, in_shape, name="rgt_data")
name="rgt_data") tf.bitwise.left_shift(lft_data, rgt_data, name="LeftShift") compare_tf_with_tvm([lh_data, rh_data], ["lft_data:0", "rgt_data:0"], "LeftShift:0") def test_forward_left_shift(): _test_forward_left_shift((10,), "int32") _test_forward_left_shift((224, 224, 3), "int16") ####################################################################### # Mean # ---- def test_forward_mean(): def check_mean(ishape, **kwargs): inp_array = np.random.uniform(size=ishape).astype(np.float32) with tf.Graph().as_default(): in1 = tf.placeholder(shape=inp_array.shape, dtype=inp_array.dtype) tf.keras.backend.mean(in1, **kwargs) compare_tf_with_tvm(inp_array, "Placeholder:0", "Mean:0", no_gpu=True) check_mean((10, 8, 16, 32)) check_mean((10, 8, 16, 32), axis=(2, 3)) check_mean((10, 8, 16, 32), axis=(1, 2), keepdims=True) ####################################################################### # Size # ---- def test_forward_size(): def check_size(ishape): np_input = np.random.uniform(size=ishape).astype(np.float32) # if all dimensions are constant, TF will optimize away size operator into constant tf_input_shape = list(np_input.shape) tf_input_shape[0] = None with tf.Graph().as_default(): input = tf.placeholder(shape=tf_input_shape, dtype=np_input.dtype, name="input") tf.size(input, name="size") compare_tf_with_tvm([np_input], ["input:0"], "size:0") check_size((10, 8, 16, 32)) check_size((10,)) ####################################################################### # All, Any, Max, Min, Prod, variance, std, logsumexp, euclidean_norm # ------------------------------------------------------------------ def test_forward_reduce(): def _check_op(tf_op, ishape, axis, keepdims, dtype="float32"): tf.reset_default_graph() if dtype == "bool": np_data = np.random.choice([True, False], size=ishape) else: np_data = np.random.uniform(size=ishape).astype(dtype) if tf_op == tf.math.reduce_prod: axis = 1 np_data = np_data.reshape(1, -1) with tf.Graph().as_default(): in_data = tf.placeholder(dtype, name="in_data") reduce_op = tf_op(in_data, axis=axis, keepdims=keepdims, name="reduce_std") compare_tf_with_tvm([np_data], ["in_data:0"], reduce_op.name) def _test_math_op(op, dtypes=["int32", "float32"]): for dtype in dtypes: _check_op(op, (3, 10), axis=(-1), keepdims=False, dtype=dtype) _check_op(op, (8, 16, 32), axis=(-1), keepdims=False, dtype=dtype) _check_op(op, (1, 8, 8, 3), axis=(2, 3), keepdims=True, dtype=dtype) _check_op(op, (2, 3, 10, 10), axis=(1, 2), keepdims=True, dtype=dtype) _test_math_op(tf.math.reduce_all, dtypes=["bool"]) _test_math_op(tf.math.reduce_any, dtypes=["bool"]) _test_math_op(tf.math.reduce_max) _test_math_op(tf.math.reduce_min) _test_math_op(tf.math.reduce_prod) _test_math_op(tf.math.reduce_variance, dtypes=["float32"]) _test_math_op(tf.math.reduce_std, dtypes=["float32"]) _test_math_op(tf.math.reduce_logsumexp, dtypes=["float32"]) if package_version.parse(tf.VERSION) >= package_version.parse("1.15.0"): _test_math_op(tf.math.reduce_euclidean_norm) ####################################################################### # All, Max, Min # ------------------------------------------------------------------ def test_forward_raw_reduce(): def _check_op(tf_op, ishape, axis, keepdims, range_axis=False, dtype="float32"): tf.reset_default_graph() if dtype == "bool": np_data = np.random.choice([True, False], size=ishape) else: np_data = np.random.uniform(size=ishape).astype(dtype) if tf_op == tf.math.reduce_prod: axis = 1 np_data = np_data.reshape(1, -1) with tf.Graph().as_default(): if range_axis: axis = 
tf.range(axis[0], axis[1], axis[2], name="range", dtype="int32") in_data = tf.placeholder(dtype, name="in_data") reduce_op = tf_op(input=in_data, axis=axis, keep_dims=keepdims, name="reduce_std") compare_tf_with_tvm([np_data], ["in_data:0"], reduce_op.name) def _test_raw_reduce_op(op, dtypes=["int32", "float32"]): for dtype in dtypes: _check_op(op, (3, 10), axis=(-1), keepdims=False, dtype=dtype) _check_op(op, (8, 16, 32), axis=(-1), keepdims=False, dtype=dtype) _check_op(op, (1, 8, 8, 3), axis=(2, 3), keepdims=True, dtype=dtype) _check_op(op, (2, 3, 10, 10), axis=(1, 2), keepdims=True, dtype=dtype) _check_op(op, (1, 8, 8, 3), axis=(2, 4, 1), keepdims=True, range_axis=True, dtype=dtype) _check_op( op, (2, 3, 10, 10), axis=(1, 3, 1), keepdims=True, range_axis=True, dtype=dtype ) if package_version.parse(tf.VERSION) >= package_version.parse("2.4.1"): _test_raw_reduce_op(tf.raw_ops.All, dtypes=["bool"]) _test_raw_reduce_op(tf.raw_ops.Max) _test_raw_reduce_op(tf.raw_ops.Min) ####################################################################### # Relational operators # -------------------- def _test_forward_rel_op(data, func): with tf.Graph().as_default(): in1 = tf.placeholder(shape=data[0].shape, dtype=data[0].dtype, name="in1") in2 = tf.placeholder(shape=data[1].shape, dtype=data[1].dtype, name="in2") op = func(in1, in2, name="op") out = tf.cast(op, tf.int32, name="out1") compare_tf_with_tvm([data[0], data[1]], ["in1:0", "in2:0"], "out1:0") def test_forward_rel_ops(): t1 = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) t2 = np.array([[9, 8, 7], [6, 5, 4], [3, 2, 1]]) _test_forward_rel_op([t1, t2], math_ops.less) _test_forward_rel_op([t1, t2], math_ops.greater) _test_forward_rel_op([t1, t2], math_ops.less_equal) _test_forward_rel_op([t1, t2], math_ops.greater_equal) _test_forward_rel_op([t1, t2], math_ops.equal) _test_forward_rel_op([t1, t2], math_ops.not_equal) ####################################################################### # ExpandDims # ---------- def _test_forward_expand_dims(data, axis): with tf.Graph().as_default(): in1 = tf.placeholder(shape=data.shape, dtype=data.dtype, name="in1") out = tf.expand_dims(in1, axis) compare_tf_with_tvm([data], [in1.name], out.name) def test_forward_expand_dims(): _test_forward_expand_dims(np.int32(1), 0) _test_forward_expand_dims(np.array([1]), 0) _test_forward_expand_dims(np.array([1]), -1) _test_forward_expand_dims(np.array([[1], [2]]), 0) _test_forward_expand_dims(np.array([[1], [2]]), 1) _test_forward_expand_dims(np.array([[1], [2]]), -1) ####################################################################### # Maximum, Minimum # ---------------- def test_forward_maximum(): """test Op Maximum""" def check_maximum(lh_shape, rh_shape, dtype): tf.reset_default_graph() lh_data = np.random.uniform(size=lh_shape).astype(dtype) rh_data = np.random.uniform(size=rh_shape).astype(dtype) with tf.Graph().as_default(): lft_data = tf.placeholder(dtype, name="lft_data") rgt_data = tf.placeholder(dtype, name="rgt_data") tf.math.maximum(lft_data, rgt_data, name="maximum") compare_tf_with_tvm([lh_data, rh_data], ["lft_data:0", "rgt_data:0"], "maximum:0") check_maximum((10, 8, 16, 32), (1,), dtype="int32") check_maximum((10, 8, 16, 32), (10, 8, 16, 32), dtype="float32") def test_forward_minimum(): """test Op Minimum""" def check_minimum(lh_shape, rh_shape, dtype): tf.reset_default_graph() lh_data = np.random.uniform(size=lh_shape).astype(dtype) rh_data = np.random.uniform(size=rh_shape).astype(dtype) with tf.Graph().as_default(): lft_data = tf.placeholder(dtype, 
name="lft_data") rgt_data = tf.placeholder(dtype, name="rgt_data") tf.math.minimum(lft_data, rgt_data, name="minimum") compare_tf_with_tvm([lh_data, rh_data], ["lft_data:0", "rgt_data:0"], "minimum:0") check_minimum((10, 8, 16, 32), (1,), dtype="int32") check_minimum((10, 8, 16, 32), (10, 8, 16, 32), dtype="float32") ####################################################################### # PlaceholderWithDefault # ---------------------- def test_placeholder(): with tf.Graph().as_default(): in_data1 = np.random.uniform(-5, 5, size=(3, 4, 5)).astype(np.float32) var1 = tf.Variable(in_data1, name="in1") var2 = array_ops.placeholder_with_default(var1, None, name="place1") in_data2 = np.random.uniform(-5, 5, size=(3, 4, 5)).astype(np.float32) place1 = array_ops.placeholder(shape=in_data1.shape, dtype=in_data1.dtype, name="in2") out1 = tf.math.add(var1, var2, name="out1") out2 = tf.math.add(out1, place1, name="out2") compare_tf_with_tvm( [in_data1, in_data2], ["place1:0", "in2:0"], "out2:0", init_global_variables=True ) ####################################################################### # OneHot # ---------------------- def _test_forward_one_hot(indices_shape, depth, on_value, off_value, axis, out_dtype): inp_array1 = np.random.randint(0, 5, size=indices_shape) with tf.Graph().as_default(): in1 = tf.placeholder(shape=inp_array1.shape, dtype=inp_array1.dtype) out = tf.one_hot(in1, depth, on_value, off_value, axis, dtype=out_dtype) compare_tf_with_tvm(inp_array1, in1.name, out.name) def test_forward_one_hot(): _test_forward_one_hot((3,), 3, 1, 0, -1, "int32") _test_forward_one_hot((3,), 3, 1.0, 0.0, -1, "float32") _test_forward_one_hot((2, 2), 5, 2, -2, 0, "int32") _test_forward_one_hot((2, 2), 5, 0.5, -0.5, 1, "float32") _test_forward_one_hot((3, 2, 4, 5), 6, 1, 0, 1, "int32") _test_forward_one_hot((3, 2, 4, 5), 6, 1.0, 0.0, 0, "float32") ####################################################################### # AddN # ---------------------- def _test_forward_add_n(inputs): tf.reset_default_graph() with tf.Graph().as_default(): temp = [] for each in inputs: temp.append(tf.placeholder(shape=each.shape, dtype=each.dtype)) output = tf.add_n(temp) compare_tf_with_tvm([each for each in inputs], [each.name for each in temp], output.name) def test_forward_add_n(): x = np.random.randint(1, 100, size=(3, 3, 3), dtype=np.int32) y = np.random.randint(1, 100, size=(3, 3, 3), dtype=np.int32) z = np.random.randint(1, 100, size=(3, 3, 3), dtype=np.int32) m, n, o = x.astype(np.float32), y.astype(np.float32), z.astype(np.float32) in0 = x in1 = [x, y] in2 = (x, y, z) in3 = m in4 = [m, n] in5 = (m, n, o) _test_forward_add_n(in0) _test_forward_add_n(in1) _test_forward_add_n(in2) _test_forward_add_n(in3) _test_forward_add_n(in4) _test_forward_add_n(in5) ####################################################################### # Sharing params case # ---------------------- def test_sharing_node(): """Test the sharing params case.""" np_data = np.random.uniform(size=(2, 2, 2)).astype("float32") with tf.Graph().as_default(): in_data = tf.placeholder(tf.float32, shape=(2, 2, 2), name="in_data") axis = tf.constant([-1], dtype=tf.int32, name="axis") mean0 = tf.reduce_mean(in_data, axis=axis, keepdims=False, name="mean0") mean1 = tf.reduce_mean(in_data, axis=axis, keepdims=False, name="mean1") out = tf.add(mean0, mean1, name="out") compare_tf_with_tvm([np_data], ["in_data:0"], "out:0") ####################################################################### # Unravel Index # ---------------------- def 
_test_forward_unravel_index(inputs): tf.reset_default_graph() with tf.Graph().as_default(): temp = [] for each in inputs: temp.append(tf.placeholder(shape=each.shape, dtype=each.dtype)) output = tf.unravel_index(temp[0], temp[1]) compare_tf_with_tvm([each for each in inputs], [each.name for each in temp], output.name) def _test_forward_unravel_index_scalar(x, y, dtype="int32"): tf.reset_default_graph() with tf.Graph().as_default(): indices_1 = constant_op.constant(x, dtype=dtype) dims_1 = constant_op.constant(y, dtype=dtype) out_1 = array_ops.unravel_index(indices_1, dims_1) compare_tf_with_tvm([], [], out_1.name) def test_forward_unravel_index(): x = np.array([0, 1, 2, 3]) y = np.array([2, 2]) _test_forward_unravel_index([x, y]) x = np.array([0, 1, 2, 5]) y = np.array([2, 3]) _test_forward_unravel_index([x, y]) x = np.array([0, 1, 2, 5]) y = np.array([6]) _test_forward_unravel_index([x, y]) x = np.array([102, 300, 16]) y = np.array([10, 10, 9, 6]) _test_forward_unravel_index([x, y]) x = np.array([100]) y = np.array([10, 10, 9, 6]) _test_forward_unravel_index([x, y]) # Test scalar input _test_forward_unravel_index_scalar(13, [1, 4, 5, 2]) ####################################################################### # Dilation2d # ---------------------- def _test_dilation2d(tensor_in_sizes, filter_in_sizes, strides, dilations, padding): """One iteration of dilation2d with given shapes and attributes""" total_size_1 = np.prod(tensor_in_sizes) total_size_2 = np.prod(filter_in_sizes) # Initializes the input tensor with array containing incrementing # numbers from 1. data_array = [f * 1.0 for f in range(1, total_size_1 + 1)] filter_array = [f * 1.0 for f in range(1, total_size_2 + 1)] with tf.Graph().as_default(): in_data = array_ops.placeholder(shape=tensor_in_sizes, dtype="float32") in_filter = constant_op.constant(filter_array, shape=filter_in_sizes, dtype="float32") nn_ops.dilation2d(in_data, in_filter, strides=strides, rates=dilations, padding=padding) compare_tf_with_tvm( np.reshape(data_array, tensor_in_sizes).astype("float32"), "Placeholder:0", "Dilation2D:0", no_gpu=True, ) def test_forward_dilation(): _test_dilation2d([1, 18, 18, 32], [4, 4, 32], [1, 1, 1, 1], [1, 2, 1, 1], "VALID") _test_dilation2d([1, 15, 15, 32], [4, 4, 32], [1, 1, 1, 1], [1, 2, 1, 1], "SAME") _test_dilation2d([1, 5, 5, 1], [2, 2, 1], [1, 1, 1, 1], [1, 1, 1, 1], "VALID") _test_dilation2d([1, 5, 5, 1], [3, 3, 1], [1, 1, 1, 1], [1, 2, 2, 1], "VALID") _test_dilation2d([1, 5, 5, 3], [3, 3, 3], [1, 1, 1, 1], [1, 1, 1, 1], "SAME") _test_dilation2d([1, 28, 28, 3], [5, 5, 3], [1, 2, 2, 1], [1, 1, 1, 1], "VALID") _test_dilation2d([1, 224, 224, 10], [8, 8, 10], [1, 1, 1, 1], [1, 1, 1, 1], "VALID") _test_dilation2d([1, 18, 18, 32], [4, 4, 32], [1, 1, 1, 1], [1, 2, 1, 1], "SAME") _test_dilation2d([1, 15, 15, 32], [4, 4, 32], [1, 1, 1, 1], [1, 2, 1, 1], "VALID") _test_dilation2d([1, 5, 5, 1], [7, 2, 1], [1, 3, 1, 1], [1, 1, 1, 1], "SAME") _test_dilation2d([1, 5, 5, 1], [3, 4, 1], [1, 2, 1, 1], [1, 2, 2, 1], "SAME") _test_dilation2d([1, 5, 5, 3], [3, 3, 3], [1, 1, 4, 1], [1, 1, 1, 1], "VALID") _test_dilation2d([1, 28, 28, 3], [5, 6, 3], [1, 1, 2, 1], [1, 1, 1, 1], "SAME") _test_dilation2d([1, 224, 224, 10], [8, 8, 10], [1, 3, 1, 1], [1, 1, 1, 1], "SAME") _test_dilation2d([1, 3, 3, 1], [2, 2, 1], [1, 1, 1, 1], [1, 2, 2, 1], "SAME") _test_dilation2d([1, 3, 3, 1], [2, 2, 1], [1, 1, 1, 1], [1, 1, 2, 1], "VALID") def _test_identityn(data_np_list): with tf.Graph().as_default(): data_tensors = [] data_tensors_name = [] for index, data_np in 
enumerate(data_np_list): tensor_name = f"data_{index}" data_tensors_name.append(tensor_name + ":0") data_tensors.append( tf.placeholder(shape=data_np.shape, dtype=str(data_np.dtype), name=tensor_name) ) output = tf.identity_n(data_tensors) output_names = [out.name for out in output] compare_tf_with_tvm( data_np_list, data_tensors_name, output_names, ) @pytest.mark.parametrize( "data_np_list", [ ( [ np.array([[1, 1], [0, 3], [0, 1], [2, 0], [3, 1]], dtype=np.int64), np.array([1, 2, 3, 4, 5], dtype=np.int64), np.array([5, 6], dtype=np.int64), ] ), ( [ np.array([[1, 1], [0, 3], [2, 0], [3, 1]], dtype=np.int64), np.array([1, 2, 3, 4], dtype=np.int64), np.array([5, 6], dtype=np.int64), np.array([True, False, True]), ] ), ( [ np.array([]), np.array([[]]), ] ), ], ) def test_forward_identityn(data_np_list): _test_identityn(data_np_list) ####################################################################### # infinity ops # ------------ def _verify_infiniteness_ops(tf_op, name): """test operator infinity ops""" # Only float types are allowed in Tensorflow for isfinite and isinf # float16 is failing on cuda tf_dtypes = ["float32", "float64"] for tf_dtype in tf_dtypes: shape = (8, 8) data = np.random.uniform(size=shape).astype(tf_dtype) data.ravel()[np.random.choice(data.size, int(data.size * 0.5), replace=False)] = np.infty data.ravel()[np.random.choice(data.size, int(data.size * 0.5), replace=False)] = np.nan tf.reset_default_graph() in_data = tf.placeholder(tf_dtype, shape, name="in_data") tf_op(in_data, name=name) compare_tf_with_tvm([data], ["in_data:0"], "{}:0".format(name)) def test_forward_isinf(): _verify_infiniteness_ops(tf.is_inf, "isinf") def test_forward_isfinite(): _verify_infiniteness_ops(tf.is_finite, "isfinite") def test_forward_isnan(): _verify_infiniteness_ops(tf.is_nan, "isnan") def _test_spop_placeholder_without_shape_info(): with tf.Graph().as_default(): @function.Defun(*[tf.int32] * 2) def Forward(x, y): print(x.name) print(y.name) b = tf.add(x, y) return b pl1 = tf.placeholder(tf.int32, name="pl1") pl2 = tf.placeholder(tf.int32, name="pl2") pl3 = tf.placeholder(tf.int32, name="pl3") data = np.array([[-1, 1], [2, -2]], dtype=np.int32) data2 = np.array([[-2, 3], [4, -6]], dtype=np.int32) data3 = np.array([[-2, 3], [4, -6]], dtype=np.int32) z1 = gen_functional_ops.StatefulPartitionedCall(args=[pl1, pl2], Tout=[tf.int32], f=Forward) z2 = z1 + pl3 compare_tf_with_tvm( [data, data2, data3], ["pl1:0", "pl2:0", "pl3:0"], ["StatefulPartitionedCall:0", z2.name], mode="vm", init_global_variables=True, ) def _test_spop_placeholder_with_shape_and_default_value(): with tf.Graph().as_default(): data = np.ones([1], dtype=int).astype(np.int32) dataVar = tf.Variable(data, shape=data.shape) pl1 = array_ops.placeholder_with_default(dataVar, shape=data.shape, name="pl1") tpl = tf.convert_to_tensor(pl1, dtype=tf.int32) @function.Defun(*[tf.int32]) def pl_with_default(pl): return tf.expand_dims(tf.multiply(pl, pl), 0) z = gen_functional_ops.StatefulPartitionedCall( args=[tpl], Tout=[tf.int32], f=pl_with_default ) compare_tf_with_tvm( data, ["pl1:0"], "StatefulPartitionedCall:0", mode="vm", init_global_variables=True ) def _test_spop_placeholder_numpy_arange_feed(): with tf.Graph().as_default(): t1 = tf.placeholder(tf.int32, (3, 3, 3), "t1") t1_data = np.arange(27, dtype=np.int32).reshape((3, 3, 3)) t2 = tf.placeholder(tf.int32, (3, 3, 3), "t2") t2_data = np.arange(27, dtype=np.int32).reshape((3, 3, 3)) @tf.function def add(x, y): return tf.add(x, y, "add_t1_t2") t3 = add(t1, t2) 
compare_tf_with_tvm( [t1_data, t2_data], ["t1:0", "t2:0"], [t3.name], mode="vm", init_global_variables=True ) def _test_spop_placeholder_numpy_array_feed(): with tf.Graph().as_default(): t1_data = np.array([[-1, 1, 3], [2, -2, 4], [2, -3, 14]], dtype=np.int32) t2_data = np.array([[-2, 1, 2], [12, -2, 14], [12, -3, 4]], dtype=np.int32) t1 = tf.placeholder(tf.int32, name="t1") t2 = tf.placeholder(tf.int32, name="t2") @tf.function def add(x, y): return tf.add(x, y, "add_t1_t2") t3 = add(t1, t2) compare_tf_with_tvm( [t1_data, t2_data], ["t1:0", "t2:0"], [t3.name], mode="vm", init_global_variables=True ) def _test_spop_function_invocation_basic(): with tf.Graph().as_default(): def fun1(a): return tf.multiply(a, a) def fun2(b): return tf.multiply(b, 10) @tf.function def fun3(x, y): x = fun2(x) y = fun1(y) z = tf.add(x, y) return z t3 = fun3(tf.constant(10.5), tf.constant(20.4)) compare_tf_with_tvm([], [], [t3.name], mode="vm", init_global_variables=True) def _test_spop_function_invocation_nested(): with tf.Graph().as_default(): t1 = tf.placeholder(tf.int32, (3, 3, 3), name="t1") t1_data = np.arange(27, dtype=np.int32).reshape((3, 3, 3)) t2 = tf.placeholder(tf.int32, name="t2") t2_data = np.arange(27, dtype=np.int32).reshape((3, 3, 3)) @tf.function def myfunc(x, y): return tf.add(x, y, "myfunc") @tf.function def myfunc2(x, y): z = myfunc(x, y) l = myfunc(z, y) m = myfunc(l, z) return tf.add(l, m, "myfunc2") res1 = myfunc(t1, t2) res2 = myfunc2(res1, t1) compare_tf_with_tvm( [t1_data, t2_data], ["t1:0", "t2:0"], [res2.name], mode="vm", init_global_variables=True ) def _test_spop_function_invocation_no_autograph(): with tf.Graph().as_default(): @tf.function(autograph=False) def fun1(a): return tf.multiply(a, a) @tf.function(autograph=False) def fun2(b): return tf.multiply(b, 10) @tf.function def fun3(x, y): x = fun2(x) y = fun1(y) z = tf.add(x, y) return z t3 = fun3(tf.constant(10.5), tf.constant(20.4)) compare_tf_with_tvm([], [], [t3.name], mode="vm", init_global_variables=True) def _test_spop_function_invocation_defun(): with tf.Graph().as_default(): def fun1(a): return tf.multiply(a, a) def fun2(b): return tf.multiply(b, b) @function.Defun(dtypes.float32, dtypes.float32, func_name="Fun3") def fun3(x, y): x = fun2(x) y = fun1(y) z = tf.add(x, y) return z op = gen_functional_ops.StatefulPartitionedCall( args=[tf.constant(10.5), tf.constant(20.4)], Tout=[dtypes.float32], f=fun3, name="SpopFnInvocation", ) compare_tf_with_tvm([], [], "SpopFnInvocation:0", mode="vm", init_global_variables=True) def _test_spop_arithmetic(): with tf.Graph().as_default(): @function.Defun(*[dtypes.int32] * 3) def arithmetic(m, x, c): z = tf.add(tf.multiply(m, x), c) return z m = tf.constant(10) x = tf.constant(20) c = tf.constant(2) spopFn = gen_functional_ops.StatefulPartitionedCall( args=[m, x, c], Tout=[tf.int32], f=arithmetic ) compare_tf_with_tvm( [], [], "StatefulPartitionedCall:0", mode="vm", init_global_variables=True ) def _test_spop_control_flow(): with tf.Graph().as_default(): @function.Defun(*[dtypes.float32] * 2) def Body1(x, y): with ops.device("/job:localhost/replica:0/task:0/device:CPU:0"): z = math_ops.multiply(x, y) i = 0 while i < 10: i += 1 if i == 5: continue z = math_ops.multiply(x, y * i) return z op = gen_functional_ops.StatefulPartitionedCall( args=[constant_op.constant(32.0), constant_op.constant(100.0)], Tout=[dtypes.float32], f=Body1, ) compare_tf_with_tvm( [], [], "StatefulPartitionedCall:0", mode="vm", init_global_variables=True ) def _test_spop_variables(): with tf.Graph().as_default(): 
const1 = tf.constant(10) const2 = tf.constant(20) var1 = tf.Variable(const1, dtype=tf.int32) var2 = tf.Variable(const2, dtype=tf.int32) @function.Defun(tf.int32, tf.int32) def Forward(x, y): return tf.multiply(x, y) z = gen_functional_ops.StatefulPartitionedCall( args=[var1, var2], Tout=[tf.int32], f=Forward ) compare_tf_with_tvm( [], [], "StatefulPartitionedCall:0", init_global_variables=True, mode="vm" ) def _test_spop_constants(): with tf.Graph().as_default(): @function.Defun(*[dtypes.int32] * 2) def constantsFn(x, y): vv = tf.constant([2, 3, 4], name="vv") z = tf.add(vv + x, y) return z a = tf.constant(20000, name="a") b = tf.constant(40000, name="b") spopFn = gen_functional_ops.StatefulPartitionedCall( args=[a, b], Tout=[tf.int32], f=constantsFn ) compare_tf_with_tvm( [], [], "StatefulPartitionedCall:0", mode="vm", init_global_variables=True ) def _test_spop_stateful(): # This test case is to test that TVM rejects any TF stateful operations # (including Resource Variables) except StatefulPartitionedCall/PartitionedCall # (as these two operators can still be used as container graphs to execute # "stateless" operations internally. tf.reset_default_graph() with tf.Graph().as_default(): @tf.function def FunctionWithStatefulOp_One(i): b = tf.random.uniform(shape=[2, 4], maxval=10, dtype=tf.float32, seed=10) y = tf.multiply(b, i) return y @tf.function def FunctionWithStatefulOp(m, n): a = tf.random.uniform(shape=[2, 4], maxval=10, dtype=tf.float32, seed=10) x = tf.multiply(a, m) y = FunctionWithStatefulOp_One(n) z = tf.multiply(x, y) return z op = FunctionWithStatefulOp(constant_op.constant(1.0), constant_op.constant(2.0)) with pytest.raises(Exception) as execinfo: compare_tf_with_tvm([], [], [op.name], init_global_variables=True, mode="vm") assert execinfo.value.args[0].startswith("The following operators are not implemented") def _test_spop_device_assignment(): # This test case is to test that TVM rejects inconsistent device assignment # while using StatefulPartitionedCall/PartitionedCall operators which in case of TVM will # be used as container graphs to internally execute "stateless" operations. tf.reset_default_graph() with tf.Graph().as_default(): def fun1(a): with ops.device("/GPU:0"): return tf.multiply(a, a) def fun2(b): with ops.device("/job:localhost/replica:0/task:0/device:CPU:1"): return tf.multiply(b, b) @function.Defun(dtypes.float32, dtypes.float32, func_name="Fun3") def fun3(x, y): with ops.device("/CPU:0"): x = fun2(x) with ops.device("/job:localhost/replica:0/task:0/device:CPU:2"): y = fun1(y) with ops.device("/job:localhost/replica:0/task:0/device:CPU:3"): z = tf.add(x, y) return z op = gen_functional_ops.StatefulPartitionedCall( args=[tf.constant(10.5), tf.constant(20.4)], Tout=[dtypes.float32], f=fun3 ) with pytest.raises(Exception) as execinfo: compare_tf_with_tvm( [], [], "StatefulPartitionedCall:0", mode="vm", init_global_variables=True ) assert execinfo.value.args[0].startswith("Found inconsistent Device assignment") def _test_spop_resource_variables(): # This test case is to test that TVM rejects any graph containing # resource variables with StatefulPartitionedOp. 
tf.reset_default_graph() with tf.Graph().as_default(): const1 = tf.constant(10) const2 = tf.constant(20) var1 = tf.Variable(const1, dtype=tf.int32, use_resource=True) var2 = tf.Variable(const2, dtype=tf.int32, use_resource=True) @tf.function def resourceVariablesTest(x, y): return tf.multiply(x, y) op = resourceVariablesTest(var1, var2) with pytest.raises(Exception) as execinfo: compare_tf_with_tvm( [], [], "StatefulPartitionedCall:0", mode="vm", init_global_variables=True ) assert execinfo.value.args[0].startswith("Graph is not frozen." " Provide a frozen graph") def test_forward_spop(): _test_spop_stateful() _test_spop_device_assignment() _test_spop_resource_variables() # Placeholder test cases _test_spop_placeholder_without_shape_info() _test_spop_placeholder_with_shape_and_default_value() _test_spop_placeholder_numpy_arange_feed() _test_spop_placeholder_numpy_array_feed() # Function Invocation test cases _test_spop_function_invocation_basic() _test_spop_function_invocation_nested() _test_spop_function_invocation_no_autograph() _test_spop_function_invocation_defun() # Test cases for various other TF constructs _test_spop_arithmetic() _test_spop_control_flow() _test_spop_variables() _test_spop_constants() ####################################################################### # Dynamic input shape # ------------------- def test_forward_dynamic_input_shape(): tf.reset_default_graph() with tf.Graph().as_default(): data = tf.placeholder(tf.float32, name="data", shape=(None,)) out = data + 1 np_data = np.random.uniform(size=(2,)).astype("float32") out_name = "add" with tf.Session() as sess: graph_def = tf_testing.AddShapesToGraphDef(sess, out_name) tf_output = run_tf_graph(sess, np_data, "data:0", ["{}:0".format(out_name)]) # TODO(kevinthesun): enable gpu test when VM heterogeneous execution is ready. 
for device in ["llvm"]: dev = tvm.device(device, 0) if not tvm.testing.device_enabled(device): print("Skip because %s is not enabled" % device) continue tvm_output = run_tvm_graph( graph_def, np_data, ["data"], 1, target=device, layout="NCHW", out_names=[out_name], mode="vm", ignore_in_shape=True, ) tvm.testing.assert_allclose(tvm_output[0], tf_output[0], rtol=1e-5, atol=1e-5) def test_forward_dynmaic_rnn_lstmblockcell(): if package_version.parse(tf.VERSION) >= package_version.parse("2.0.0"): return total_series_length = 50000 truncated_backprop_length = 15 state_size = 4 echo_step = 3 batch_size = 5 num_layers = 5 def generateData(): x = np.array(np.random.choice(2, total_series_length, p=[0.5, 0.5])) y = np.roll(x, echo_step) y[0:echo_step] = 0 x = x.reshape((batch_size, -1)) # The first index changing slowest, subseries as rows y = y.reshape((batch_size, -1)) return (x, y) batchX_placeholder = tf.placeholder(tf.float32, [batch_size, truncated_backprop_length]) init_state = tf.placeholder(tf.float32, [num_layers, 2, batch_size, state_size]) state_per_layer_list = tf.unstack(init_state, axis=0) rnn_tuple_state = tuple( [ tf.nn.rnn_cell.LSTMStateTuple( state_per_layer_list[idx][0], state_per_layer_list[idx][1] ) for idx in range(num_layers) ] ) # Forward passes def lstm_cell(): return tensorflow.contrib.rnn.LSTMBlockCell(state_size) cell = tf.nn.rnn_cell.MultiRNNCell( [lstm_cell() for _ in range(num_layers)], state_is_tuple=True ) states_series, current_state = tf.nn.dynamic_rnn( cell, tf.expand_dims(batchX_placeholder, -1), initial_state=rnn_tuple_state ) with tf.Session() as sess: sess.run(tf.global_variables_initializer()) x, y = generateData() _current_state = np.zeros((num_layers, 2, batch_size, state_size)) start_idx = 0 end_idx = start_idx + truncated_backprop_length batchX = x[:, start_idx:end_idx] # Save current state for TVM current_state_tvm = _current_state _current_state, _states_series = sess.run( [current_state, states_series], feed_dict={batchX_placeholder: batchX, init_state: _current_state}, ) # Organize results and corresponding names tf_output = [_states_series] for c in _current_state: tf_output.append(c.c) tf_output.append(c.h) name = [states_series.name.split(":")[0]] for t in current_state: name.append(t.c.name.split(":")[0]) name.append(t.h.name.split(":")[0]) graph_def = sess.graph.as_graph_def(add_shapes=True) final_graph_def = graph_util.convert_variables_to_constants(sess, graph_def, name) tvm_output = run_tvm_graph( final_graph_def, [batchX.astype("float32"), current_state_tvm.astype("float32")], ["Placeholder", "Placeholder_1"], out_names=name, num_output=len(name), mode="vm", disabled_pass=["FoldScaleAxis"], ) # Compare result for i in range(len(tf_output)): tvm.testing.assert_allclose(tf_output[i], tvm_output[i], atol=1e-5, rtol=1e-5) ####################################################################### # Unique # ------------ def _test_unique(n, dtype, is_dyn): tf.reset_default_graph() np_data = np.random.randint(100, size=n).astype(dtype) with tf.Graph().as_default(): if is_dyn: in_data = tf.placeholder(dtype, [n], name="in_data") else: in_data = tf.constant(np_data, dtype, name="in_data") tf.unique(in_data) if is_dyn: compare_tf_with_tvm(np_data, "in_data:0", ["Unique:0", "Unique:1"], mode="vm") else: compare_tf_with_tvm(None, "", ["Unique:0", "Unique:1"]) def test_forward_unique(): """test Unique""" for dtype in ["int32", "int64"]: for is_dyn in [False, True]: _test_unique(50, dtype, is_dyn) _test_unique(100, dtype, is_dyn) 
####################################################################### # Unique with counts # ------------ def _test_unique_with_counts(n, dtype, is_dyn): tf.reset_default_graph() np_data = np.random.randint(100, size=n).astype(dtype) with tf.Graph().as_default(): if is_dyn: in_data = tf.placeholder(dtype, [n], name="in_data") else: in_data = tf.constant(np_data, dtype, name="in_data") tf.unique_with_counts(in_data) if is_dyn: compare_tf_with_tvm( np_data, "in_data:0", ["UniqueWithCounts:0", "UniqueWithCounts:1", "UniqueWithCounts:2"], mode="vm", ) else: compare_tf_with_tvm( None, "", ["UniqueWithCounts:0", "UniqueWithCounts:1", "UniqueWithCounts:2"] ) def test_forward_unique_with_counts(): """test UniqueWithCounts""" for dtype in ["int32", "int64"]: for is_dyn in [False, True]: _test_unique_with_counts(10, dtype, is_dyn) _test_unique_with_counts(20, dtype, is_dyn) ####################################################################### # check graph ir for nn.moments # ------------ def test_moments(): g = tf.Graph() shape = [4, 176, 8, 8] dtype = "float32" with g.as_default(): A = tf.placeholder(shape=shape, dtype=dtype, name="A") B = tf.placeholder(shape=shape, dtype=dtype, name="B") mean, variance = tf.nn.moments(A, [1], keep_dims=True) normalised_input = (A - mean) / tf.sqrt(variance + 0.0005) mod, _ = from_tensorflow(g.as_graph_def(add_shapes=True)) program = """ def @main(%A: Tensor[(4, 176, 8, 8), float32]) { %527 = mean(%A, axis=[1], keepdims=True) /* moments/mean */; %528 = subtract(%A, %527) /* sub */; %529 = subtract(%A, %527); %530 = multiply(%529, %529) /* moments/SquaredDifference */; %531 = mean(%530, axis=[1], keepdims=True) /* moments/variance */; %532 = add(%531, 0.0005f) /* add */; %533 = sqrt(%532) /* Sqrt */; divide(%528, %533) /* truediv */ } """ mod_golden = tvm.parser.parse('#[version = "0.0.5"]\n' + program) tvm.ir.assert_structural_equal(mod["main"].body, mod_golden["main"].body, map_free_vars=True) if __name__ == "__main__": pytest.main([__file__])
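# The graph-IR check in test_moments() generalizes to any pair of Relay
# programs: parse both and assert structural equality up to free-variable
# names. A small self-contained sketch using only APIs the test above already
# relies on (tvm.parser.parse and tvm.ir.assert_structural_equal); the toy
# program is illustrative, not part of the test suite.
import tvm

toy = '#[version = "0.0.5"]\ndef @main(%x: Tensor[(2, 2), float32]) { add(%x, %x) }'
renamed = toy.replace('%x', '%y')
mod_a = tvm.parser.parse(toy)
mod_b = tvm.parser.parse(renamed)
# Passes: the two bodies differ only in variable naming.
tvm.ir.assert_structural_equal(mod_a["main"].body, mod_b["main"].body, map_free_vars=True)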
bot.py
# -*- coding: utf-8 -*-
from linepy import *
import time, threading

ts = time.time()
cl = LINE("ST", appName="IOS\t10.1.1\tiPhone X\t11.2.5")
cl.log(cl.authToken)
oepoll = OEPoll(cl)
clMID = cl.profile.mid
print(clMID)

admin = ["u8b9d115e85202db06eb798e8c1b40ae9", "udfd61c9d62794fcae8323841b1fc4b83"]
clog = []

cl.findAndAddContactsByMid("u8b9d115e85202db06eb798e8c1b40ae9")
cl.sendMessage("u8b9d115e85202db06eb798e8c1b40ae9","======成功登入======\n======YTER取消機======\n登陸花費時間: {} 秒".format(time.time()-ts))

def cancelstart(to):
    group = cl.getGroup(to)
    gname = group.name
    group.name = "感謝邀請!此為𝓨𝓣𝓔𝓡公開取消機!"
    cl.updateGroup(group)
    time.sleep(1)
    if group.invitee is None:
        group.name = "本群組無須取消!"
        cl.updateGroup(group)
        time.sleep(1)
        group.name = gname
        cl.updateGroup(group)
    else:
        allinv = [contact.mid for contact in group.invitee if contact.mid not in admin]
        group.name = "即將取消人數:{}".format(str(len(allinv)))
        cl.updateGroup(group)
        time.sleep(1)
        group.name = "取消中請稍後..."
        cl.updateGroup(group)
        time.sleep(1)
        a = 1
        for x in allinv:
            cl.cancelGroupInvitation(to,[x])
            time.sleep(a)
            a += 0.5
        group.name = "取消完成!"
        cl.updateGroup(group)
        time.sleep(1)
        group.name = gname
        cl.updateGroup(group)
        clog.append(to)

def bot(op):
    try:
        if op.type == 5:
            cl.sendMessage("u8b9d115e85202db06eb798e8c1b40ae9",cl.getContact(op.param1).displayName+" 加入好友\nMID: "+op.param1)
            cl.findAndAddContactsByMid(op.param1)
            cl.sendMessage(op.param1,"安安!感謝您加我為好友!\n此為𝓨𝓣𝓔𝓡公開取消機!\n直接邀請入群即可取消!\n作者網址:line.me/ti/p/~najianlin\n作者友資:")
            cl.sendContact(op.param1,"u8b9d115e85202db06eb798e8c1b40ae9")
        if op.type == 13:
            if clMID in op.param3:
                cl.acceptGroupInvitation(op.param1)
                if op.param2 in admin:
                    cl.sendMessage(op.param1, "創作者邀請入群\n群組指令清單\n[/cancel]取消全部邀請中\n[/bye]退群")
                else:
                    cl.sendMessage("u8b9d115e85202db06eb798e8c1b40ae9",cl.getContact(op.param2).displayName+" 邀請取消機入群\nMID: "+op.param2)
                    cancelstart(op.param1)
                    time.sleep(1)
                    cl.leaveGroup(op.param1)
        if op.type == 26 or op.type == 25:
            msg = op.message
            text = msg.text
            msg_id = msg.id
            receiver = msg.to
            sender = msg._from
            if msg.toType == 0:
                if sender != cl.profile.mid:
                    to = sender
                else:
                    to = receiver
            else:
                to = receiver
            if text is None:
                return
            if sender in admin:
                if text.lower() == 'glist':
                    ret = "[取消群組列表]"
                    if clog == []:
                        ret += "\n列表為空"
                    else:
                        for x in clog:
                            ret += "\n➲" + x
                    cl.relatedMessage(to,str(ret) + "\n[結束]",msg_id)
                elif text.lower() == '/cancel':
                    if msg.toType == 2:
                        cancelstart(to)
                elif text.lower() == '/bye':
                    if msg.toType == 1:
                        cl.leaveRoom(to)
                    elif msg.toType == 2:
                        cl.leaveGroup(to)
                elif text.lower() == '/startup':
                    profile = cl.getProfile()
                    profile.displayName = "〖𝓨𝓣𝓔𝓡〗公開取消機"
                    profile.statusMessage = "此為\n𝓨𝓣𝓔𝓡公開取消機\n---------------\n作者ID:\nnajianlin\n---------------\n功能:\n-取消群組所有邀請\n---------------"
                    cl.updateProfile(profile)
                    cl.updateProfilePicture('ytercancel.png')
                    cl.relatedMessage(to,"初始化完成",msg_id)
    except Exception as e:
        print(e)

def RunBot():
    while True:
        try:
            ops = oepoll.singleTrace(count=50)
            if ops is not None:
                for op in ops:
                    oepoll.setRevision(op.revision)
                    bot(op)
                    #thread = threading.Thread(target=bot, args=(op,))
                    #thread.start()
        except Exception as e:
            print(e)

if __name__ == "__main__":
    RunBot()
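# bot.py paces each cancelGroupInvitation() call with a linearly growing sleep
# (1 s, then 1.5 s, 2 s, ...) to stay under LINE's rate limits. The same
# pattern in isolation, with the network call stubbed out -- a sketch, not part
# of linepy's API; throttled() and the step values are illustrative:
import time

def throttled(items, action, start=1.0, step=0.5):
    delay = start
    for item in items:
        action(item)       # e.g. one cancelGroupInvitation() per invitee mid
        time.sleep(delay)  # back off a little more after every call
        delay += step

throttled(["mid1", "mid2"], lambda mid: print("cancel", mid))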
radical-pilot-limits.py
#!/usr/bin/env python

import os
import time
import socket

import threading       as mt
import multiprocessing as mp

threads = list()
procs   = list()
files   = list()
sockets = list()

t_max = 4 * 1024
p_max = 1 * 1024
f_max = 1 * 1024
s_max = 1 * 1024


def _work():
    time.sleep(30)


base = '/tmp/rp_limit_%d.%%d' % os.getpid()
while True:
    try:
        f = open(base % len(files), 'w')
        files.append(f)
        if len(files) >= f_max:
            break
    except:
        break
print('files  : %5d' % len(files))
for f in files:
    os.unlink(f.name)
    f.close()

host = 'localhost'
port = 22
while True:
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((host, port))
        sockets.append(s)
        if len(sockets) >= s_max:
            break
    except Exception:
        break
print('sockets: %5d' % len(sockets))
for s in sockets:
    s.close()

while True:
    try:
        t = mt.Thread(target=_work)
        t.start()
        threads.append(t)
        if len(threads) >= t_max:
            break
    except:
        break
print('threads: %5d' % len(threads))
for t in threads:
    t.join()

while True:
    try:
        p = mp.Process(target=_work)
        p.start()
        procs.append(p)
        if len(procs) >= p_max:
            break
    except:
        break
print('procs  : %5d' % len(procs))
for p in procs:
    p.join()
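# The script above discovers limits empirically by allocating until failure.
# The soft/hard caps it runs into can also be read directly via the stdlib -- a
# sketch using resource.getrlimit(), which is POSIX-only (and RLIMIT_NPROC is
# not available on every platform):
import resource

for label, limit in [('open files', resource.RLIMIT_NOFILE),
                     ('processes ', resource.RLIMIT_NPROC)]:
    soft, hard = resource.getrlimit(limit)
    print('%s: soft=%s hard=%s' % (label, soft, hard))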
preprocessing.py
#!/usr/bin/python
__author__ = 'Nitin'

import threading
import marisa_trie as mt
import csv
import os.path
import marshal as Pickle
from backports import lzma as lz

IP6_INDEX_FILE = '_ip6_index.marisa'
NAME_INDEX_FILE = '_name_index.marisa'


def build_map(all_name_index_trie, all_target_index_trie, filename='../records/aaaa.csv'):
    # Derive the output path up front but keep `filename` pointing at the CSV.
    # (The original overwrote `filename` here, so the csv.reader below ended up
    # reading the .smat output file instead of the input records.)
    out_filename = filename.replace('.csv', '.smat')
    if os.path.isfile(out_filename):
        print out_filename
        print 'nnz = ', sum(1 for line in open(out_filename))
        return None
    with open(filename, 'rb') as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        current_target = None
        current_name = None
        count = 0
        i = 0
        triplets = list()
        for row in reader:
            i += 1
            # if i == 11:
            #     break
            if i % 10000000 == 0:
                print i
            prev_name, prev_target = current_name, current_target
            current_name, current_target = row[0], row[2]
            if prev_name != current_name and prev_name is not None:
                triplets.append((all_name_index_trie.key_id(unicode(prev_name)),
                                 all_target_index_trie.key_id(unicode(prev_target)),
                                 count))
                # print prev_host, prev, count
                # keys.append(prev_host)
                # values.append((ip6_indexes.key_id(unicode(prev)), count))
                count = 1
            else:
                if prev_target != current_target and prev_target is not None:
                    triplets.append((all_name_index_trie.key_id(unicode(prev_name)),
                                     all_target_index_trie.key_id(unicode(prev_target)),
                                     count))
                    # print prev_host, prev, count
                    # keys.append(prev_host)
                    # values.append((ip6_indexes.key_id(unicode(prev)), count))
                    count = 1
                else:
                    count += 1
        # Flush the final run. The last row's values live in current_*, not in
        # prev_* (the original flushed prev_*, dropping the last (name, target)
        # pair and crashing on single-row inputs where prev_* is still None).
        if current_name is not None:
            triplets.append((all_name_index_trie.key_id(unicode(current_name)),
                             all_target_index_trie.key_id(unicode(current_target)),
                             count))
    print out_filename
    print 'nnz = ', len(triplets)
    # Write the triplets out on a background thread so callers can overlap I/O.
    t = threading.Thread(target=store_in_smat, args=(triplets, out_filename))
    t.start()
    return t
    # store_in_smat(triplets, out_file)
    # Pickle.dump(triplets, out_file)
    # out_file.close()


def store_in_smat(triplets, out_filename):
    with open(out_filename, 'wb') as f:
        for triplet in triplets:
            f.write(str(triplet[0]) + ' ' + str(triplet[1]) + ' ' + str(triplet[2]) + '\n')

# all_name_index_trie, all_target_index_trie = index_ip6address_4_aaaa()
# # print dictionary['::ffff:74.117.221.143']
# build_map(all_name_index_trie, all_target_index_trie)
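# The .smat files written above are plain "row col value" triplet lines. If the
# end goal is a sparse (name x target) count matrix, they load directly into
# scipy -- a sketch assuming scipy is acceptable alongside the stdlib-only code
# above; load_smat() is a hypothetical helper, not part of the script:
import numpy as np
from scipy.sparse import coo_matrix

def load_smat(path):
    rows, cols, vals = np.loadtxt(path, dtype=np.int64, unpack=True, ndmin=2)
    return coo_matrix((vals, (rows, cols)))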
segments.py
import json
import requests
import logging
import threading

API_KEY = "<enter your edge_impulse_api_key>"
projectId = 16681

headers = {
    "Accept": "application/json",
    "x-api-key": API_KEY
}

def segment(tid, ids):
    for sampleId in ids:
        url1 = "https://studio.edgeimpulse.com/v1/api/{}/raw-data/{}/find-segments".format(projectId, sampleId)
        payload1 = {
            "shiftSegments": True,
            "segmentLengthMs": 2000
        }
        response1 = requests.request("POST", url1, json=payload1, headers=headers)
        resp1 = json.loads(response1.text)
        segments = resp1["segments"]
        if len(segments) == 0:
            continue
        payload2 = {"segments": segments}
        url2 = "https://studio.edgeimpulse.com/v1/api/{}/raw-data/{}/segment".format(projectId, sampleId)
        response2 = requests.request("POST", url2, json=payload2, headers=headers)
        logging.info('{} {} {}'.format(tid, sampleId, response2.text))

if __name__ == "__main__":
    format = "%(asctime)s: %(message)s"
    logging.basicConfig(format=format, level=logging.INFO, datefmt="%H:%M:%S")

    querystring = {"category": "training", "excludeSensors": "true"}
    url = "https://studio.edgeimpulse.com/v1/api/{}/raw-data".format(projectId)
    response = requests.request("GET", url, headers=headers, params=querystring)
    resp = json.loads(response.text)
    id_list = list(map(lambda s: s["id"], resp["samples"]))

    div = 8
    n = int(len(id_list) / div)
    threads = list()
    for i in range(div):
        if i == (div - 1):
            ids = id_list[n*i:]
        else:
            ids = id_list[n*i: n*(i+1)]
        x = threading.Thread(target=segment, args=(i, ids))
        threads.append(x)
        x.start()

    for thread in threads:
        thread.join()
    logging.info("Finished")
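# segments.py splits the sample ids into 8 chunks and spawns one thread per
# chunk by hand. The stdlib thread pool expresses the same fan-out more
# compactly -- a sketch that keeps the segment() worker above unchanged;
# segment_all() is a hypothetical wrapper:
from concurrent.futures import ThreadPoolExecutor

def segment_all(id_list, workers=8):
    chunks = [id_list[i::workers] for i in range(workers)]
    with ThreadPoolExecutor(max_workers=workers) as pool:
        for tid, ids in enumerate(chunks):
            pool.submit(segment, tid, ids)
    # leaving the with-block joins all workers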
modified_smbprotocol_transport.py
# -*- coding: utf-8 -*- # Copyright: (c) 2019, Jordan Borean (@jborean93) <jborean93@gmail.com> # MIT License (see LICENSE or https://opensource.org/licenses/MIT) # Added support to directly use socket.socket object instead of giving ip and port by (@nkpro2000sr) <naveenstudy2000sr@gmail.com> import logging import select import socket import struct import threading from collections import ( OrderedDict, ) from smbprotocol.structure import ( BytesField, IntField, Structure, ) try: from queue import Queue except ImportError: # pragma: no cover from Queue import Queue log = logging.getLogger(__name__) class DirectTCPPacket(Structure): """ [MS-SMB2] v53.0 2017-09-15 2.1 Transport The Directory TCP transport packet header MUST have the following structure. """ def __init__(self): self.fields = OrderedDict([ ('stream_protocol_length', IntField( size=4, little_endian=False, default=lambda s: len(s['smb2_message']), )), ('smb2_message', BytesField( size=lambda s: s['stream_protocol_length'].get_value(), )), ]) super(DirectTCPPacket, self).__init__() def socket_connect(func): def wrapped(self, *args, **kwargs): if not self._connected: log.info("Connecting to DirectTcp socket") try: self._sock = self.sock except (OSError, socket.gaierror) as err: raise ValueError("Failed to connect to '%s:%s': %s" % (self.server, self.port, str(err))) self._sock.settimeout(None) # Make sure the socket is in blocking mode. self._t_recv = threading.Thread(target=self.recv_thread, name="recv-%s:%s" % (self.server, self.port)) self._t_recv.daemon = True self._t_recv.start() self._connected = True func(self, *args, **kwargs) return wrapped class Tcp(object): MAX_SIZE = 16777215 def __init__(self, socket_object, recv_queue, timeout=None): self.sock = socket_object self.server = self.sock.getpeername()[0] self.port = self.sock.getpeername()[1] self.timeout = timeout self._connected = False self._sock = None self._recv_queue = recv_queue self._t_recv = None def close(self): if self._connected: log.info("Disconnecting DirectTcp socket") # Send a shutdown to the socket so the select returns and wait until the thread is closed before actually # closing the socket. self._connected = False self._sock.shutdown(socket.SHUT_RDWR) self._t_recv.join() self._sock.close() @socket_connect def send(self, header): b_msg = header data_length = len(b_msg) if data_length > self.MAX_SIZE: raise ValueError("Data to be sent over Direct TCP size %d exceeds the max length allowed %d" % (data_length, self.MAX_SIZE)) tcp_packet = DirectTCPPacket() tcp_packet['smb2_message'] = b_msg data = tcp_packet.pack() while data: sent = self._sock.send(data) data = data[sent:] def recv_thread(self): try: while True: select.select([self._sock], [], []) b_packet_size = self._sock.recv(4) if b_packet_size == b"": return packet_size = struct.unpack(">L", b_packet_size)[0] b_data = bytearray() bytes_read = 0 while bytes_read < packet_size: b_fragment = self._sock.recv(packet_size - bytes_read) b_data.extend(b_fragment) bytes_read += len(b_fragment) self._recv_queue.put(bytes(b_data)) except Exception as e: # Log a warning if the exception was raised while we were connected and not just some weird platform-ism # exception when reading from a closed socket. if self._connected: log.warning("Uncaught exception in socket recv thread: %s" % e) return finally: # Make sure we close the message processing thread in connection.py self._recv_queue.put(None)
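# The point of this fork is that Tcp wraps an already-connected socket.socket
# instead of dialing (server, port) itself. A minimal usage sketch on Python 3;
# the address is a placeholder and the payload is an arbitrary byte string, not
# a valid SMB2 message:
import socket
from queue import Queue

sock = socket.create_connection(("192.0.2.1", 445))  # hypothetical server
recv_queue = Queue()
transport = Tcp(sock, recv_queue)
transport.send(b"\xfeSMB2...")  # gets framed with the 4-byte Direct TCP header
reply = recv_queue.get()        # blocks; None once the recv thread shuts down
transport.close()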
test_fx.py
import builtins import contextlib import copy import functools import inspect import math import numbers import operator import os import pickle import sys import torch import traceback import warnings import unittest from math import sqrt from pathlib import Path from torch.multiprocessing import Process from torch.testing import FileCheck from torch.testing._internal.common_methods_invocations import op_db from torch.testing._internal.common_device_type import ops, onlyCPU, instantiate_device_type_tests import torch.utils._pytree as pytree import torch.fx._pytree as fx_pytree from torch.fx import symbolic_trace, Proxy, Node, GraphModule, Interpreter, Tracer, Transformer, Graph, wrap, PH import torch._C._fx from torch.fx.node import Target, Argument from torch.fx.passes import shape_prop from torch.fx.immutable_collections import immutable_dict, immutable_list from torch.fx.experimental.rewriter import RewritingTracer from torch.fx.operator_schemas import get_signature_for_torch_op from copy import deepcopy from torch.fx.proxy import TraceError from fx.test_subgraph_rewriter import TestSubgraphRewriter # noqa: F401 from fx.test_dce_pass import TestDCE # noqa: F401 from fx.test_fx_const_fold import TestConstFold # noqa: F401 from typing import Any, Callable, Dict, NamedTuple, List, Optional, Tuple, Union from torch.testing._internal.common_utils import run_tests, TEST_WITH_ROCM, IS_WINDOWS, IS_SANDCASTLE, IS_MACOS from torch.testing._internal.jit_utils import JitTestCase from fx.named_tup import MyNamedTup try: from torchvision import models as torchvision_models HAS_TORCHVISION = True except ImportError: HAS_TORCHVISION = False skipIfNoTorchVision = unittest.skipIf(not HAS_TORCHVISION, "no torchvision") class SimpleTest(torch.nn.Module): def forward(self, x): return torch.relu(x + 3.0) def a_non_torch_leaf(a, b): return a + b # Test wrap() passing both a function name as well as a function # directly def a_lifted_leaf(a, b): return a[0] + a[1] + b wrap('a_lifted_leaf') # Test wrapping twice doesn't break anything wrap('a_lifted_leaf') def a_lifted_leaf2(a, b): return a[0] + a[1] + b wrap(a_lifted_leaf2) wrap('len') @wrap def wrapped_via_decorator(a): return a + 1 wrap('wrapped_with_submodule') def wrapped_with_submodule(x: torch.Tensor, batchnorm1d: torch.nn.BatchNorm1d): return batchnorm1d(x) real_wrapped_via_decorator = wrapped_via_decorator real_a_lifed_leaf = a_lifted_leaf real_a_lifed_leaf2 = a_lifted_leaf2 _sqrt = sqrt wrap('wrapper_fn') def wrapper_fn(x): return torch.foo(x) class Pair(NamedTuple): x : torch.Tensor y : torch.Tensor # for testing pytrees class Foo(object): # noqa: B209 def __init__(self, a, b): self.a = a self.b = b class TestFX(JitTestCase): def setUp(self): if TEST_WITH_ROCM or IS_SANDCASTLE or IS_WINDOWS or IS_MACOS: return torch_root = Path(__file__).resolve().parent.parent p = torch_root / 'build' / 'lib' / 'libtorchbind_test.so' torch.ops.load_library(str(p)) def checkGraphModule(self, m: torch.nn.Module, args, kwargs=None): """Check that an nn.Module's results match the GraphModule version for a given set of args/kwargs. 
""" kwargs = kwargs if kwargs else {} ref_outs = m(*args, **kwargs) gm = symbolic_trace(m) gm.graph.lint() test_outs = gm(*args, **kwargs) self.assertEqual(ref_outs, test_outs) def test_graph_module(self): class MySub(torch.nn.Module): def __init__(self): super().__init__() self.w = torch.nn.Parameter(torch.rand(4, 3)) def forward(self, x): return self.w + x class MyModule(torch.nn.Module): def __init__(self): super().__init__() self.lin = torch.nn.Linear(4, 3) self.sub_mod = MySub() self.w = torch.nn.Parameter(torch.rand(3)) def forward(self, A, B, c): t = torch.sigmoid(A) + self.lin(c) return self.sub_mod(t.data + self.w + t + 1 - A + B // A + -A + A.add(B, alpha=3)) m = MyModule() gm = symbolic_trace(m) ms = torch.jit.script(gm) class M2(torch.nn.Module): def forward(self, A): m, idx = torch.max(A, 0) return m + 1, idx + 1 m2 = M2() gm2 = symbolic_trace(m2) class T(torch.nn.Module): def forward(self, A, b=4, *args, c=5, **kwargs): x = A + 1 + args[0] + kwargs['3'] return x t = T() symbolic_trace(t) def test_custom_import(self): graph = torch.fx.Graph() a = graph.placeholder('x') b = graph.placeholder('y') c = graph.call_function(a_non_torch_leaf, (a, b)) d = graph.call_function(torch.sin, (c,)) graph.output(d) gm = GraphModule(torch.nn.Module(), graph) x, y = torch.rand(1), torch.rand(1) self.assertEqual(torch.sin(x + y), gm(x, y)) def test_args_kwargs(self): class T(torch.nn.Module): def forward(self, *args, **kwargs): x = args[0] + kwargs['foo'] return x t = T() self.checkGraphModule(t, (torch.rand(1), torch.rand(1)), {'foo': torch.rand(1)}) def test_args_kwargs_no_self(self): class T(torch.nn.Module): def forward(*args, **kwargs): # noqa: B902 self = args[0] return torch.relu(args[1]) t = T() with self.assertRaisesRegex(RuntimeError, r'cannot be part of \*args expansion'): self.checkGraphModule(t, (torch.rand(1), torch.rand(1)), {'foo': torch.rand(1)}) def test_fx_shifts(self): class MyModule(torch.nn.Module): def forward(self, x): return x << 3, x >> 3 input = torch.LongTensor(10).random_(0, 1024) m = MyModule() self.checkGraphModule(m, (input,)) def test_dict(self): class MyDictMod(torch.nn.Module): def forward(self, d): return d['3'].relu(), {'4' : d['3'].neg()} input_dict = {'3': torch.rand(3, 4)} m = MyDictMod() self.checkGraphModule(m, (input_dict,)) def test_disallow_override(self): # Custom delegate to disallow in-place tensor operations class NoMutableCallTracer(Tracer): def create_node(self, kind : str, target : Union[str, Callable], args : Tuple[Argument, ...], kwargs : Dict[str, Any], name : Optional[str] = None, type_expr : Optional[Any] = None) -> Node: name = target if isinstance(target, str) else torch.typename(target) if name[-1] == '_': raise RuntimeError('In-place operations are not supported') return super().create_node(kind, target, args, kwargs, name) # Test method class MyInplaceMod(torch.nn.Module): def forward(self, x): x.add_(3.0) return x m = MyInplaceMod() with self.assertRaisesRegex(RuntimeError, 'In-place operations'): NoMutableCallTracer().trace(m) # Test free function class MyInplaceMod2(torch.nn.Module): def forward(self, x): torch.log_(x) return x m2 = MyInplaceMod2() with self.assertRaisesRegex(RuntimeError, 'In-place operations'): NoMutableCallTracer().trace(m2) # Test symbolic node as an arg class MyInplaceMod3(torch.nn.Module): def forward(self, x): y = torch.ones(3, 4) y.add_(x) return x m3 = MyInplaceMod3() with self.assertRaisesRegex(RuntimeError, 'In-place operations'): NoMutableCallTracer().trace(m3) def test_leaf_module(self): # Custom 
delegate to make it so that there are no leaf modules, everything # should get traced through class NoLeafModulesTracer(Tracer): def is_leaf_module(self, m, qualname): return False class MyReluMod(torch.nn.Module): def __init__(self): super().__init__() self.relu = torch.nn.ReLU() def forward(self, x): return self.relu(x) mrm = MyReluMod() sym = NoLeafModulesTracer().trace(mrm) for node in sym.nodes: self.assertNotEqual(node.op, 'call_module') sym.lint() def test_wrap(self): self.assertEqual(3 + 4 + 5, a_lifted_leaf((3, 4), 5)) def to_trace(y): return a_lifted_leaf((4, y), 3) + a_lifted_leaf((3, 4), 5) + a_lifted_leaf((y, y), y) m = symbolic_trace(to_trace) self.assertIn('a_lifted_leaf', m.code) self.assertEqual(27, m(2)) self.assertIs(a_lifted_leaf, real_a_lifed_leaf) def test_wrap_fn_directly(self): self.assertEqual(3 + 4 + 5, a_lifted_leaf2((3, 4), 5)) def to_trace(y): return a_lifted_leaf2((4, y), 3) + a_lifted_leaf2((3, 4), 5) + a_lifted_leaf2((y, y), y) m = symbolic_trace(to_trace) self.assertIn('a_lifted_leaf2', m.code) self.assertEqual(27, m(2)) self.assertIs(a_lifted_leaf2, real_a_lifed_leaf2) def test_wrapped_via_decorator(self): self.assertEqual(wrapped_via_decorator(0), 1) def to_trace(y): return wrapped_via_decorator(y) m = symbolic_trace(to_trace) self.assertIn('wrapped_via_decorator', m.code) self.assertEqual(m(0), 1) self.assertIs(wrapped_via_decorator, real_wrapped_via_decorator) self.assertFalse(hasattr(wrapped_via_decorator, "__fx_already_patched")) def test_wrap_with_submodule(self): class M(torch.nn.Module): def __init__(self): super(M, self).__init__() self.batchnorm1d = torch.nn.BatchNorm1d(2, affine=False) def forward(self, x: torch.Tensor): return wrapped_with_submodule(x, self.batchnorm1d) m = symbolic_trace(M()) self.assertIn("wrapped_with_submodule", m.code) input = torch.rand(3, 2) ref_batchnorm1d = torch.nn.BatchNorm1d(2, affine=False) self.assertEqual(ref_batchnorm1d(input), m(input)) def test_graph_edit_with_proxy(self): class M(torch.nn.Module): def forward(self, a, b): return a + b m = M() g = symbolic_trace(m).graph new_g = torch.fx.Graph() val_map : Dict[Node, Node] = {} output_val = new_g.graph_copy(g, val_map) t = Proxy(output_val) # test that we can use proxy objects to generate more graph code later for things that do not need to work with modules. new_g.output((t + t).node) gm = GraphModule(m, new_g) gm.graph.lint() self.assertEqual(gm(3, 4), 14) def test_graph_unique_names(self): class M(torch.nn.Module): def forward(self, a, b): return a + b m = M() g = symbolic_trace(m).graph new_g = torch.fx.Graph() val_map : Dict[Node, Node] = {} output_val = new_g.graph_copy(g, val_map) t = Proxy(output_val) # test that we can use proxy objects to generate more graph code later for things that do not need to work with modules. 
new_g.output((t + t).node) gm = GraphModule(m, new_g) seen_names : Set[str] = set() for node in gm.graph.nodes: assert node.name not in seen_names seen_names.add(node.name) def test_stack_traces(self): class M(torch.nn.Module): def forward(self, a, b): return a + b tracer = torch.fx.Tracer() tracer.record_stack_traces = True graph = tracer.trace(M()) for node in graph.nodes: if node.op == 'output': continue self.assertTrue(node.stack_trace is not None) assert 'test_fx.py' in node.stack_trace def test_graph_unique_names_manual(self): graph : torch.fx.Graph = torch.fx.Graph() a : torch.fx.Node = graph.create_node('placeholder', 'x') b : torch.fx.Node = graph.create_node('call_module', 'linear_mod', args=(a,), name='foo_1_1') c : torch.fx.Node = graph.create_node('get_attr', 'y_attr', name='foo_1') d : torch.fx.Node = graph.create_node('call_function', operator.add, args=(b, c)) graph.output(d) graph2 = torch.fx.Graph() val_map : Dict[Node, Node] = {} graph2.graph_copy(graph, val_map) seen_names : Set[str] = set() for node in graph2.nodes: assert node.name not in seen_names seen_names.add(node.name) def test_unpack(self): class M(torch.nn.Module): def forward(self, a, b): c, d = a return c + d + b a = (torch.rand(1), torch.rand(1)) b = torch.rand(1) m = M() self.checkGraphModule(m, (a, b)) def test_native_callable(self): if TEST_WITH_ROCM or IS_SANDCASTLE or IS_WINDOWS or IS_MACOS: raise unittest.SkipTest("non-portable load_library call used in test") # This test exercises the case where we use FX to translate from Python # code to some native callable object # # For the purposes of testing, we use ElementwiseInterpreter defined # in test_custom_class.cpp. # # We test that we can # 1) Construct a native callable from FX IR # 2) Construct a drop-in replacement module that delegates to the # native callable rather than the original code # 3) Run both the original code and native callable wrapper with # equivalent results # 4) TorchScript compile the native callable wrapper and confirm # equivalent results with the reference # 5) TorchScript serialize and deserialize the native callable # and confirm equivalent results with the reference # We use this simple Module as a reference computation class MySimpleMod(torch.nn.Module): def forward(self, x): return 3.0 * x + x msm = MySimpleMod() # This is what a lowering pass might look like: a function that takes # a valid nn.Module, symbolically traces it, lowers the Module to some # representation, and wraps that representation up into another # nn.Module instance that handles dispatch to the compiled/lowered code. def lower_to_elementwise_interpreter(orig_mod : torch.nn.Module) -> torch.nn.Module: # ===== Stage 1: Symbolic trace the module ===== mod = symbolic_trace(orig_mod) # ===== Stage 2: Lower GraphModule representation to the C++ # interpreter's instruction format ====== instructions = [] constant_idx = 0 constants = {} fn_input_names = [] target_to_name = { operator.add : "add", operator.mul : "mul" } output_node : Optional[Node] = None # For each instruction, create a triple # (instruction_name : str, inputs : List[str], output : str) # to feed into the C++ interpreter for n in mod.graph.nodes: target, args, out_name = n.target, n.args, n.name assert len(n.kwargs) == 0, "kwargs currently not supported" if n.op == 'placeholder': # Placeholders specify function argument names. 
Save these # for later when we generate the wrapper GraphModule fn_input_names.append(target) elif n.op == 'call_function': assert target in target_to_name, "Unsupported call target " + target arg_names = [] for arg in args: if not isinstance(arg, Node): # Pull out constants. These constants will later be # fed to the interpreter C++ object via add_constant() arg_name = f'constant_{constant_idx}' constants[arg_name] = torch.tensor( [arg] if isinstance(arg, numbers.Number) else arg) arg_names.append(arg_name) constant_idx += 1 else: arg_names.append(arg.name) instructions.append((target_to_name[target], arg_names, out_name)) elif n.op == 'output': if output_node is not None: raise RuntimeError('Multiple output nodes!') output_node = n else: raise RuntimeError('Unsupported opcode ' + n.op) interpreter = torch.classes._TorchScriptTesting._ElementwiseInterpreter() # Load constants for k, v in constants.items(): interpreter.add_constant(k, v) # Specify names for positional input arguments interpreter.set_input_names(fn_input_names) # Load instructions interpreter.set_instructions(instructions) # Specify name for single output assert isinstance(output_node.args[0], torch.fx.Node) interpreter.set_output_name(output_node.args[0].name) # ===== Stage 3: Create a wrapper GraphModule around the interpreter ===== class WrapperModule(torch.nn.Module): def __init__(self, interpreter): super().__init__() self.interpreter = interpreter wrapper = WrapperModule(interpreter) # Create a graph that: 1) Takes function arguments 2) Invokes the interpreter # 3) Returns the speficied return value # FIXME: The following code could be greatly simplified by symbolic_trace'ing # the wrapper with a Tracer that considers the Wrapper instance a root # module, however, I can't get `__call__` exposed on TorchBind classes # without it messing up Python `hasattr` for some reason. More digging # into CPython's implementation of hasattr is probably in order... graph = torch.fx.Graph() # Add placeholders for fn inputs placeholder_nodes = [] for name in fn_input_names: placeholder_nodes.append(graph.create_node('placeholder', name)) # Get the interpreter object interpreter_node = graph.create_node('get_attr', 'interpreter') # Add a node to call the interpreter instance output_node = graph.create_node( op='call_method', target='__call__', args=(interpreter_node, placeholder_nodes)) # Register output graph.output(output_node) graph.lint() # Return final GraphModule!!! 
return GraphModule(wrapper, graph) # Lower GraphModule to C++ interpreter lowered = lower_to_elementwise_interpreter(msm) # Compare correctness with original module x = torch.rand(3, 4) ref_out = msm(x) test_out = lowered(x) torch.testing.assert_allclose(test_out, ref_out) # Test TorchScript compilation scripted_lowered = torch.jit.script(lowered) script_out = scripted_lowered(x) torch.testing.assert_allclose(script_out, ref_out) # Test TorchScript ser/de import_copy = self.getExportImportCopy(scripted_lowered) imported_out = import_copy(x) torch.testing.assert_allclose(imported_out, ref_out) def test_reserved_getattr(self): """Ensure that we do not name any nodes with a reserved builtin like `getattr`""" class M(torch.nn.Module): def forward(self, a): return a.foo.bar.baz m = M() m_g = symbolic_trace(m) m_g.graph.lint() for node in m_g.graph.nodes: self.assertTrue(node.name != "getattr") def test_node_tagging(self): class TaggingTracer(Tracer): def create_node(self, kind : str, target : Union[str, Callable], args : Tuple[Argument, ...], kwargs : Dict[str, Any], name : Optional[str] = None, type_expr : Optional[Any] = None) -> Node: n = super().create_node(kind, target, args, kwargs, name) n.tag = 'foo' return n class M(torch.nn.Module): def forward(self, a, b): return a + b m = M() g = TaggingTracer().trace(m) g.lint() for n in g.nodes: self.assertTrue(hasattr(n, 'tag')) self.assertEqual(n.tag, 'foo') def test_tensor_attribute(self): class TensorAttribute(torch.nn.Module): def __init__(self): super().__init__() self.tensor = torch.rand(3, 4) def forward(self, x): return torch.nn.functional.linear(x, self.tensor) ta = TensorAttribute() traced = symbolic_trace(ta) traced(torch.rand(4, 4)) class WrapperForQualname(torch.nn.Module): def __init__(self): super().__init__() self.ta = TensorAttribute() def forward(self, x): return torch.nn.functional.linear(x, self.ta.tensor) wfq = WrapperForQualname() traced2 = symbolic_trace(wfq) traced2.graph.lint() traced2(torch.rand(4, 4)) def test_symbolic_trace_sequential(self): class Simple(torch.nn.Module): def forward(self, x): return torch.neg(x) seq = torch.nn.Sequential( Simple(), Simple(), Simple() ) traced = symbolic_trace(seq) traced.graph.lint() x = torch.rand(3, 4) self.assertEqual(traced(x), seq(x)) def test_tensor_constant(self): class ConstTensor(torch.nn.Module): def forward(self, x): return torch.nn.functional.linear(x, torch.zeros(3, 4)) ct = ConstTensor() traced = symbolic_trace(ct) traced.graph.lint() traced(torch.rand(4, 4)) def test_pickle_graphmodule(self): class Nested(torch.nn.Module): def __init__(self): super().__init__() self.st = torch.nn.Linear(4, 4) def forward(self, x): return self.st(x) n = Nested() traced = symbolic_trace(n) traced.graph.lint() pickled = pickle.dumps(traced) loaded = pickle.loads(pickled) loaded.graph.lint() x = torch.rand(3, 4) self.assertEqual(loaded(x), traced(x)) def test_pickle_custom_import(self): graph = torch.fx.Graph() a = graph.placeholder('x') b = graph.placeholder('y') c = graph.call_function(a_non_torch_leaf, (a, b)) d = graph.call_function(torch.sin, (c,)) graph.output(d) gm = GraphModule(torch.nn.Module(), graph) pickled = pickle.dumps(gm) loaded = pickle.loads(pickled) loaded.graph.lint() x, y = torch.rand(1), torch.rand(1) self.assertEqual(loaded(x, y), gm(x, y)) def test_all_input_nodes(self): graph : torch.fx.Graph = torch.fx.Graph() a : torch.fx.Node = graph.placeholder('x') b : torch.fx.Node = graph.call_module('linear_mod', args=(a,)) c : torch.fx.Node = graph.get_attr('y_attr') d : 
torch.fx.Node = graph.call_function(operator.add, args=(b, c)) e : torch.fx.Node = graph.call_function(torch.unsqueeze, args=(d, 0)) graph.output(e) graph.lint() self.assertEqual(b.all_input_nodes, [a]) self.assertEqual(c.all_input_nodes, []) self.assertEqual(d.all_input_nodes, [b, c]) self.assertEqual(e.all_input_nodes, [d]) def test_deepcopy_graphmodule_with_transform(self): st = SimpleTest() traced = symbolic_trace(st) traced.graph.lint() def transform(traced): new_graph = torch.fx.Graph() val_map : Dict[Node, Node] = {} output_value = new_graph.graph_copy(traced.graph, val_map) relu_out = new_graph.create_node( op='call_method', target='neg', args=(output_value,), kwargs={}) new_graph.output(relu_out) return GraphModule(traced, new_graph) transformed = transform(traced) transformed.graph.lint() copied = copy.deepcopy(transformed) self.assertNotEqual(id(type(transformed)), id(type(copied))) x = torch.randn(3, 4) self.assertEqual(copied(x), transformed(x)) def test_deepcopy_with_submods_params(self): class Bar(torch.nn.Module): def __init__(self): super().__init__() self.param = torch.nn.Parameter(torch.rand(3, 4)) def forward(self, x): return torch.relu(x) + self.param class Baz(torch.nn.Module): def __init__(self): super().__init__() self.param = torch.nn.Parameter(torch.rand(3, 4)) self.bar = Bar() def forward(self, x): return self.bar(x) - self.param baz = Baz() traced = symbolic_trace(baz) traced.graph.lint() copied = copy.deepcopy(traced) copied.graph.lint() def test_unpack_list_better_error(self): class SomeArgs(torch.nn.Module): def forward(self, a, b): return torch.rand(3, 4) class UnpacksList(torch.nn.Module): def __init__(self): super().__init__() self.sa = SomeArgs() def forward(self, x : list): return self.sa(*x) ul = UnpacksList() with self.assertRaisesRegex(TraceError, 'Proxy object cannot be iterated.'): symbolic_trace(ul) def test_unpack_dict_better_error(self): class SomeKwargs(torch.nn.Module): def forward(self, x=3, y=4): return torch.rand(3, 4) class UnpacksDict(torch.nn.Module): def __init__(self): super().__init__() self.sk = SomeKwargs() def forward(self, x : dict): return self.sk(**x) ud = UnpacksDict() with self.assertRaisesRegex(TraceError, 'Proxy object cannot be iterated.'): symbolic_trace(ud) def test_pretty_print_targets(self): # Test that Graph pretty-print prints friendly name for targets # in `operator` and `builtins` class SomeMod(torch.nn.Module): def forward(self, x): return torch.add(x.foo + x.bar, 3.0) traced = symbolic_trace(SomeMod()) graph_str = str(traced.graph) self.assertIn('builtins.getattr', graph_str) self.assertIn('operator.add', graph_str) self.assertIn('torch.add', graph_str) def test_pretty_print_node(self): class M(torch.nn.Module): def __init__(self): super().__init__() self.param: torch.nn.Parameter = torch.nn.Parameter( torch.rand(3, 4)) self.linear = torch.nn.Linear(4, 5) def forward(self, x: torch.Tensor, y: int = 2): return self.linear(x[y] + self.param).clamp(min=0.0, max=1.0) traced = symbolic_trace(M()) all_formatted = "\n".join([n.format_node() for n in traced.graph.nodes]) FileCheck().check("x").check("placeholder") \ .check("y").check("placeholder") \ .check("getitem").check("call_function") \ .check("param").check("get_attr") \ .check("add").check("call_function") \ .check("linear").check("call_module") \ .check("clamp").check("call_method") \ .run(all_formatted) def test_script_tensor_constant(self): # TorchScript seems to ignore attributes that start with `__`. 
# We used to call anonymous Tensor values `__tensor_constant*`, but # they were getting ignored by script. Now they're called # `_tensor_constant*` class IHaveATensorConstant(torch.nn.Module): def forward(self, x): return x + torch.rand(3, 4) traced = torch.fx.symbolic_trace(IHaveATensorConstant()) torch.jit.script(traced) def test_torch_fx_len(self): class FXLenTest(torch.nn.Module): def forward(self, x): return len(x) traced = symbolic_trace(FXLenTest()) self.assertEqual(traced(torch.rand(3, 4)), 3) # Test scriptability scripted = torch.jit.script(FXLenTest()) self.assertEqual(scripted(torch.rand(3)), 3) traced_scripted = torch.jit.script(traced) self.assertEqual(traced_scripted(torch.rand(3)), 3) # Test non-proxy len class FXLenTest2(torch.nn.Module): def __init__(self): super().__init__() self.l = [3, 4, 5] def forward(self, x): return x + len(self.l) traced2 = symbolic_trace(FXLenTest2()) inp = torch.rand(3, 4) self.assertEqual(traced2(inp), inp + 3.0) self.assertIs(len, builtins.len) def test_sqrt(self): class Sqrt1(torch.nn.Module): def forward(self, x): return sqrt(x.size(0)) class Sqrt2(torch.nn.Module): def forward(self, x): return math.sqrt(x.size(0)) class Sqrt3(torch.nn.Module): def forward(self, x): return x + math.sqrt(2) + sqrt(2) self.checkGraphModule(Sqrt1(), [torch.zeros(8)]) self.checkGraphModule(Sqrt2(), [torch.zeros(8)]) self.checkGraphModule(Sqrt3(), [torch.zeros(8)]) self.assertIs(sqrt, _sqrt) self.assertIs(math.sqrt, _sqrt) def test_torch_custom_ops(self): class M(torch.nn.Module): def forward(self, a): b = torch.ops.aten.sigmoid(a) c = torch.ops.aten.cat([a, b]) return torch.ops.aten.cat((c, c)) m = M() input = torch.randn(3) ref_out = m(input) gm = symbolic_trace(m) gm.graph.lint() out = gm(input) self.assertEqual(out, ref_out) def test_pickle_torch_custom_ops(self): class M(torch.nn.Module): def forward(self, a): b = torch.ops.aten.sigmoid(a) c = torch.ops.aten.cat([a, b]) return torch.ops.aten.cat((c, c)) m = M() input = torch.randn(3) ref_out = m(input) gm = symbolic_trace(m) gm.graph.lint() pickled = pickle.dumps(gm) loaded = pickle.loads(pickled) self.assertEqual(loaded(input), gm(input)) def test_pretty_print(self): st = SimpleTest() traced = symbolic_trace(st) traced.graph.lint() printed = str(traced) assert 'SimpleTest()' in printed assert 'torch.relu' in printed def test_pretty_print_graph(self): class KwargPrintTest(torch.nn.Module): def forward(self, x): return torch.squeeze(x + 3.0, dim=2) st = KwargPrintTest() traced = symbolic_trace(st) traced.graph.lint() stringed = str(traced.graph) for s in ['args', 'kwargs', '#users']: assert s in stringed def test_custom_proxy_type(self): class TensorPair: def __init__(self, left, right): self.left, self.right = left, right def add(self, other): l = self.left + other.left r = self.right + other.right return TensorPair(l, r) def mul(self, other): l = self.left * other.left r = self.right * other.right return TensorPair(l, r) def use_tensor_pair(x : TensorPair, y : TensorPair): s = x.add(y) return s.mul(x) x = TensorPair(torch.randn(5, 3), torch.randn(5, 3)) y = TensorPair(torch.randn(5, 3), torch.randn(5, 3)) ref_out = use_tensor_pair(x, y) traced = symbolic_trace(use_tensor_pair) traced_out = traced(x, y) self.assertEqual(traced_out.left, ref_out.left) self.assertEqual(traced_out.right, ref_out.right) def test_custom_proxy_type_literal(self): class TensorPair(metaclass=torch.fx.ProxyableClassMeta): def __init__(self, left, right): self.left, self.right = left, right def add(self, other): l = self.left + 
other.left r = self.right + other.right return TensorPair(l, r) def mul(self, other): l = self.left * other.left r = self.right * other.right return TensorPair(l, r) def use_tensor_pair_literal(x : TensorPair): s = x.add(TensorPair(torch.zeros(5, 3), torch.zeros(5, 3))) return s.mul(x) x = TensorPair(torch.randn(5, 3), torch.randn(5, 3)) ref_out = use_tensor_pair_literal(x) traced = symbolic_trace(use_tensor_pair_literal) traced_out = traced(x) self.assertEqual(traced_out.left, ref_out.left) self.assertEqual(traced_out.right, ref_out.right) def test_custom_proxy_dynamic_value(self): class TensorPair(metaclass=torch.fx.ProxyableClassMeta): def __init__(self, left, right): self.left, self.right = left, right def add(self, other): l = self.left + other.left r = self.right + other.right return TensorPair(l, r) def mul(self, other): l = self.left * other.left r = self.right * other.right return TensorPair(l, r) def use_tensor_pair_ctor(x : TensorPair, y : torch.Tensor): s = x.add(TensorPair(y, y)) return s.mul(x) x = TensorPair(torch.randn(5, 3), torch.randn(5, 3)) y = torch.randn(5, 3) ref_out = use_tensor_pair_ctor(x, y) traced = symbolic_trace(use_tensor_pair_ctor) traced_out = traced(x, y) self.assertEqual(traced_out.left, ref_out.left) self.assertEqual(traced_out.right, ref_out.right) def test_custom_proxy_input_dependent_control_flow(self): class ZeroTensor(metaclass=torch.fx.ProxyableClassMeta): def __init__(self, inp): if inp.sum() == 0: self.is_zero = True self.tensor = torch.tensor([]) else: self.is_zero = False self.tensor = inp def add(self, other): if self.is_zero: return ZeroTensor(other.tensor) elif other.is_zero: return self def use_zero_tensor(x : torch.Tensor, y : torch.Tensor): return ZeroTensor(x + y) x, y = torch.randn(5, 3), torch.randn(5, 3) ref_out = use_zero_tensor(x, y) traced = symbolic_trace(use_zero_tensor) traced_out = traced(x, y) self.assertEqual(traced_out.is_zero, ref_out.is_zero) self.assertEqual(traced_out.tensor, ref_out.tensor) def test_graph_fns(self): g = Graph() a = g.placeholder('a') b = g.call_module('linear', (a,)) c = g.get_attr('bias') d = g.call_method('add', (b, c)) e = g.call_function(torch.sin, (d,)) g.output(e) mod = torch.nn.Module() mod.linear = torch.nn.Linear(3, 4) mod.bias = torch.rand(4) gm = GraphModule(mod, g) gm.graph.lint() input = torch.rand(3) r = gm(input) ref = torch.sin(mod.linear(input) + mod.bias) self.assertEqual(r, ref) def test_remove_uses(self): g : torch.fx.Graph = Graph() x : torch.fx.Node = g.placeholder('x') relu : torch.fx.Node = g.call_function(torch.relu, (x,)) neg : torch.fx.Node = g.call_function(torch.neg, (relu,)) g.output(neg) neg.replace_all_uses_with(relu) g.erase_node(neg) self.assertTrue(neg not in relu.users) def test_nonetype_annotation(self): eb = torch.nn.EmbeddingBag(3, 4) symbolic_trace(eb) def test_pickle_nonetype_annotation(self): eb = torch.nn.EmbeddingBag(10, 3, mode='sum') traced = symbolic_trace(eb) pickled = pickle.dumps(traced) loaded = pickle.loads(pickled) loaded.graph.lint() input = torch.LongTensor([1, 2, 4, 5, 4, 3, 2, 9]) offsets = torch.LongTensor([0, 4]) self.assertEqual(loaded(input, offsets), traced(input, offsets)) def test_return_tuple(self): class M(torch.nn.Module): def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: return (x, x + x) original = M() traced = symbolic_trace(original) self.assertEqual(traced(torch.ones(1)), original.forward(torch.ones(1))) def test_construct_root_dict(self): graph : torch.fx.Graph = torch.fx.Graph() a : torch.fx.Node = 
graph.create_node('placeholder', 'x')
        b : torch.fx.Node = graph.create_node('call_module', 'foo.bar.baz', args=(a,))
        c : torch.fx.Node = graph.create_node('get_attr', 'zip.zap.zam')
        d : torch.fx.Node = graph.create_node('call_function', operator.add, args=(b, c))
        graph.output(d)

        linear_mod : torch.nn.Module = torch.nn.Linear(3, 4)
        add_param : torch.Tensor = torch.rand(3, 4)
        gm : torch.fx.GraphModule = torch.fx.GraphModule(
            {'foo.bar.baz': linear_mod, 'zip.zap.zam': add_param}, graph)
        gm.graph.lint()

        assert 'self.foo.bar.baz' in gm.code

        x : torch.Tensor = torch.rand(3, 3)
        out : torch.Tensor = gm(x)
        ref_out : torch.Tensor = linear_mod(x) + add_param
        self.assertEqual(out, ref_out)

    def test_symbolic_trace_assert(self):
        class AssertsTensorShape(torch.nn.Module):
            def forward(self, x):
                torch._assert(x.shape[1] > 4, "assert_foobar")
                return x

        m = AssertsTensorShape()
        # verify traceability
        traced = symbolic_trace(m)
        # verify assertion on traced model works correctly at runtime
        traced(torch.rand(4, 5))
        with self.assertRaisesRegex(AssertionError, "assert_foobar"):
            traced(torch.rand(4, 3))
        # verify the symbolically traced module is scriptable
        ms = torch.jit.script(m)
        with self.assertRaisesRegex(torch.jit.Error, "assert_foobar"):
            ms(torch.rand(4, 3))

    def test_trace_fn_constant(self):
        some_constant = torch.rand(3, 4)

        def add_const(x):
            return some_constant + x

        traced = symbolic_trace(add_const)

        input = torch.rand(3, 4)
        self.assertEqual(traced(input), add_const(input))

    def test_copy_no_remap(self):
        traced = symbolic_trace(SimpleTest())
        g = traced.graph
        copied = torch.fx.Graph()
        for node in g.nodes:
            copied.node_copy(node)
        with self.assertRaisesRegex(RuntimeError, 'does not belong to this Graph'):
            copied.lint()

    def test_wrong_topo(self):
        graph : torch.fx.Graph = torch.fx.Graph()
        a : torch.fx.Node = graph.create_node('placeholder', 'x')
        b : torch.fx.Node = graph.create_node('call_module', 'foo.bar.baz', args=(a,))
        c : torch.fx.Node = graph.create_node('get_attr', 'zip.zap.zam')
        d : torch.fx.Node = graph.create_node('call_function', operator.add, args=(b, c))
        graph.output(d)
        nodes = list(graph.nodes)
        nodes[3].append(nodes[2])
        with self.assertRaisesRegex(RuntimeError, 'was used before it has been defined'):
            graph.lint()

    def test_example_shape_prop(self):
        class TestCase(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.attr = torch.randn(3, 4)
                self.submod = torch.nn.Linear(4, 4)

            def forward(self, x):
                return torch.neg(self.submod(x.relu() + self.attr))

        tc = TestCase()
        tc_traced = symbolic_trace(tc)
        ref_out = tc_traced(torch.rand(3, 4))
        shape_prop.ShapeProp(tc_traced).propagate(torch.rand(3, 4))

        # Make sure we're testing all opcodes
        opcodes = set()
        output_shape : Optional[torch.Size] = None
        output_stride : Optional[Tuple[int]] = None
        for node in tc_traced.graph.nodes:
            opcodes.add(node.op)
            if node.op == 'output':
                output_shape = node.args[0].meta['tensor_meta'].shape
                output_stride = node.args[0].meta['tensor_meta'].stride
        self.assertEqual(opcodes, set(['placeholder', 'get_attr', 'call_function',
                                       'call_method', 'call_module', 'output']))

        # Test shape propagation and make sure results match actual
        self.assertEqual(output_shape, ref_out.shape)
        self.assertEqual(output_stride, ref_out.stride())

    def test_shape_prop_layout(self):
        class ConvTest(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.conv_mod = torch.nn.Conv2d(5, 5, 3)

            def forward(self, x):
                return self.conv_mod(x)

        # contiguous layout
        test_mod = ConvTest()
        traced = symbolic_trace(test_mod)
        x = torch.randn(5, 5, 224, 224)
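        # ShapeProp is an Interpreter subclass: it runs the GraphModule
        # node-by-node on the example input and records each intermediate
        # result's shape/dtype/stride/memory_format as a TensorMetadata entry
        # in node.meta['tensor_meta']. Minimal usage sketch (hedged: mirrors
        # the calls used in these tests; names are illustrative):
        #
        #   gm = torch.fx.symbolic_trace(my_module)
        #   shape_prop.ShapeProp(gm).propagate(example_input)
        #   for n in gm.graph.nodes:
        #       print(n.name, n.meta.get('tensor_meta', None))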
shape_prop.ShapeProp(traced).propagate(x) assert(all(node.meta['tensor_meta'].memory_format is torch.contiguous_format for node in traced.graph.nodes)) x_channels_last = x.contiguous(memory_format=torch.channels_last) traced.to(memory_format=torch.channels_last) shape_prop.ShapeProp(traced).propagate(x_channels_last) for node in traced.graph.nodes: # NB: the implementation of conv may not preserve the memory format, # unfortunately. The best we can do is just check that the placeholder # node is channels-last if node.op in {'placeholder'}: self.assertEqual(node.meta['tensor_meta'].memory_format, torch.channels_last) def test_shape_prop_aggregate(self): class ReturnTwo(torch.nn.Module): def forward(self, x): return (3, torch.sum(x)) class UnderTest(torch.nn.Module): def __init__(self): super().__init__() self.rt = ReturnTwo() def forward(self, x): return self.rt(x) ut = UnderTest() class RTTracer(torch.fx.Tracer): def is_leaf_module(self, m, module_qualified_name): return type(m) is ReturnTwo graph = RTTracer().trace(ut) mod = torch.fx.GraphModule(ut, graph) shape_prop.ShapeProp(mod).propagate(torch.rand(3, 4)) for node in mod.graph.nodes: if node.op == 'call_module': assert 'tensor_meta' in node.meta tensor_meta = node.meta['tensor_meta'] assert tensor_meta[0] == 3 assert tensor_meta[1].shape == torch.Size([]) def test_shape_prop_layout_3d(self): class ConvTest3d(torch.nn.Module): def __init__(self): super().__init__() self.conv_mod = torch.nn.Conv3d(5, 5, 3) def forward(self, x): return self.conv_mod(x) test_mod_3d = ConvTest3d() traced_3d = symbolic_trace(test_mod_3d) x_3d = torch.randn(5, 5, 224, 224, 15) shape_prop.ShapeProp(traced_3d).propagate(x_3d) assert(all(node.meta['tensor_meta'].memory_format is torch.contiguous_format for node in traced_3d.graph.nodes)) x_channels_last_3d = x_3d.contiguous(memory_format=torch.channels_last_3d) traced_3d.to(memory_format=torch.channels_last_3d) shape_prop.ShapeProp(traced_3d).propagate(x_channels_last_3d) for node in traced_3d.graph.nodes: # NB: the implementation of conv may not preserve the memory format, # unfortunately. 
The best we can do is just check that the placeholder
            # node is channels-last
            if node.op in {'placeholder'}:
                self.assertEqual(node.meta['tensor_meta'].memory_format, torch.channels_last_3d)

    def test_interpreter(self):
        class MyModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.param = torch.nn.Parameter(torch.rand(3, 4))
                self.linear = torch.nn.Linear(4, 5)

            def forward(self, x):
                return self.linear(x + self.param).clamp(min=0.0, max=1.0)

        m = MyModule()
        gm = torch.fx.symbolic_trace(m)

        interpreter = Interpreter(gm)
        input = torch.randn(3, 4)
        self.assertEqual(interpreter.run(input), gm(input))
        self.assertEqual(interpreter.run(input), m(input))

    def test_interpreter_run_node_override(self):
        class MyModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.param = torch.nn.Parameter(torch.rand(3, 4))
                self.linear = torch.nn.Linear(4, 5)

            def forward(self, x):
                return self.linear(x + self.param).clamp(min=0.0, max=1.0)

        m = MyModule()
        gm = torch.fx.symbolic_trace(m)

        class RunNodeInterpreter(Interpreter):
            def __init__(self, module):
                super().__init__(module)

            def run_node(self, n : Node) -> Any:
                result = super().run_node(n)
                n.cached_value = result
                return result

        input = torch.randn(3, 4)
        RunNodeInterpreter(gm).run(input)
        for node in gm.graph.nodes:
            assert hasattr(node, 'cached_value')

    def test_interpreter_onthefly_swap(self):
        def fn(x):
            return torch.sigmoid(x).neg()

        gm = torch.fx.symbolic_trace(fn)

        class NegSigmSwapInterpreter(Interpreter):
            def call_function(self, target : Target, args : Tuple, kwargs : Dict) -> Any:
                if target == torch.sigmoid:
                    return torch.neg(*args, **kwargs)
                return super().call_function(target, args, kwargs)

            def call_method(self, target : Target, args : Tuple, kwargs : Dict) -> Any:
                if target == 'neg':
                    call_self, *args_tail = args
                    return call_self.sigmoid(*args_tail, **kwargs)
                return super().call_method(target, args, kwargs)

        input = torch.randn(3, 4)
        result = NegSigmSwapInterpreter(gm).run(input)
        self.assertEqual(result, torch.neg(input).sigmoid())

    def test_interpreter_partial_eval(self):
        class MyModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.param = torch.nn.Parameter(torch.rand(3, 4))
                self.linear = torch.nn.Linear(4, 5)

            def forward(self, x):
                return self.linear(x + self.param).clamp(min=0.0, max=1.0)

        gm = torch.fx.symbolic_trace(MyModule())
        interp = Interpreter(gm)
        env = {}
        for node in gm.graph.nodes:
            if node.op == 'call_module' and node.target == 'linear':
                env[node] = torch.arange(0, 12, 1).reshape(3, 4) - 6.0
                break
        assert len(env) == 1
        x = torch.randn(3, 4)
        result = interp.run(x, initial_env=env)
        self.assertEqual(result, (torch.arange(0, 12, 1).reshape(3, 4) - 6.0).clamp(0.0, 1.0))

    def test_interpreter_star_args(self):
        def with_star_args(x, *args):
            return x + args[0]

        gm = torch.fx.symbolic_trace(with_star_args)
        interp = Interpreter(gm)
        result = interp.run(torch.ones(3, 4), torch.ones(3, 4), torch.rand(3, 4))
        self.assertEqual(result, torch.ones(3, 4) * 2.0)

    @skipIfNoTorchVision
    def test_interpreter_noop_resnet18(self):
        rn18 = torchvision_models.resnet18()
        transformed = torch.fx.Transformer(symbolic_trace(rn18)).transform()
        inp = torch.randn(5, 3, 224, 224)
        self.assertEqual(transformed(inp), rn18(inp))

    @skipIfNoTorchVision
    def test_interpreter_gc_values(self):
        rn18 = torchvision_models.resnet18()
        interp = Interpreter(symbolic_trace(rn18))
        inp = torch.rand(5, 3, 224, 224)
        out = interp.run(inp)
        env_key_names = set(n.name for n in interp.env.keys())
        self.assertEqual(env_key_names, set(['output']))

    def test_transformer_noop(self):
        class MyModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.param = torch.nn.Parameter(torch.rand(3, 4))
                self.linear = torch.nn.Linear(4, 5)

            def forward(self, x):
                return self.linear(x + self.param).clamp(min=0.0, max=1.0)

        m = MyModule()
        gm = torch.fx.symbolic_trace(m)

        new_gm = Transformer(gm).transform()

        input = torch.randn(3, 4)
        self.assertEqual(new_gm(input), gm(input))

    def test_transformer_op_swap(self):
        def fn(x):
            return torch.sigmoid(x).neg()

        gm = torch.fx.symbolic_trace(fn)

        class NegSigmSwapXformer(Transformer):
            def call_function(self, target : Target, args : Tuple, kwargs : Dict) -> Any:
                if target == torch.sigmoid:
                    return torch.neg(*args, **kwargs)
                return super().call_function(target, args, kwargs)

            def call_method(self, target : Target, args : Tuple, kwargs : Dict) -> Any:
                if target == 'neg':
                    call_self, *args_tail = args
                    return call_self.sigmoid(*args_tail, **kwargs)
                return super().call_method(target, args, kwargs)

        transformed = NegSigmSwapXformer(gm).transform()

        input = torch.randn(3, 4)
        self.assertEqual(transformed(input), torch.neg(input).sigmoid())

    def test_transformer_multi_outputs(self):
        class MyModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.param = torch.nn.Parameter(torch.rand(3, 4))
                self.linear = torch.nn.Linear(4, 5)

            def forward(self, x):
                x = x + self.param
                out = self.linear(x)
                return x, out

        m = MyModule()
        gm = torch.fx.symbolic_trace(m)

        new_gm = Transformer(gm).transform()

        input = torch.randn(3, 4)
        self.assertEqual(new_gm(input), gm(input))

    def test_fn_type_annotations(self):
        class Foo(torch.nn.Module):
            def forward(self, p : Pair, z : torch.Tensor, i : int) -> Dict[str, torch.Tensor]:
                return {'a': p.x + p.y + z + i}

        foo_scripted = torch.jit.script(Foo())
        foo_scripted(Pair(torch.rand(5), torch.rand(5)), torch.rand(5), 3)

        fxed = symbolic_trace(Foo())
        fxed_scripted = torch.jit.script(fxed)
        fxed_scripted(Pair(torch.rand(5), torch.rand(5)), torch.rand(5), 3)

    def test_fn_type_annotation_empty(self):
        def forward(a : List[torch.Tensor]):
            return a[0]
        torch.jit.script(symbolic_trace(forward))

    def test_wrapped_method(self):
        def wrap_with_relu(fn):
            @functools.wraps(fn)
            def wrapper(*args, **kwargs):
                return torch.relu(fn(*args, **kwargs))
            return wrapper

        class Foo(torch.nn.Module):
            @wrap_with_relu
            def forward(self, x, w):
                return torch.matmul(x, w)

        f = Foo()
        traced = symbolic_trace(f)
        x, w = torch.rand(3, 4), torch.rand(4, 4)
        self.assertTrue(any(n.target == torch.relu for n in traced.graph.nodes))

    def test_empty_graph_codegen(self):
        graph = torch.fx.Graph()
        gm = torch.fx.GraphModule(torch.nn.Module(), graph)
        self.assertEqual(gm(), None)

    def test_sequential(self):
        m = torch.nn.Sequential(torch.nn.Conv2d(1, 1, 1))
        gm = torch.fx.symbolic_trace(m)
        gm_copy = copy.deepcopy(gm)

    def test_ctx_mgr(self):
        @contextlib.contextmanager
        def do_nothing():
            yield

        class M(torch.nn.Module):
            def __init__(self):
                super().__init__()

            @do_nothing()
            def forward(self, x):
                return torch.relu(x)

        m = M()
        self.checkGraphModule(m, (torch.rand(3, 4),))

    def test_typename_print(self):
        graph : torch.fx.Graph = torch.fx.Graph()
        x : torch.fx.Node = graph.create_node('placeholder', 'x')
        b : torch.fx.Node = graph.create_node('call_function', target=torch.relu, args=(x,),
                                              type_expr=List[float])
        output : torch.fx.Node = graph.output(b)
        self.assertTrue('typing.List[float]' in str(graph))

    def test_ellipsis(self):
        class M(torch.nn.Module):
            def __init__(self):
                super().__init__()

            def forward(self, x, y):
                return x + y[:, 1:10, ...]
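        # Note: symbolic tracing records the indexing above as a single
        # operator.getitem call_function node whose argument embeds the
        # (slice(None), slice(1, 10), Ellipsis) tuple as a constant -- slices
        # and Ellipsis are not Proxies, so they pass through untouched.
        # Hedged sketch of the traced graph's contents (illustrative only):
        #
        #   getitem = operator.getitem(y, (slice(None), slice(1, 10), Ellipsis))
        #   add = operator.add(x, getitem)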
traced = symbolic_trace(M()) x, y = torch.rand(5, 9, 3, 4), torch.rand(5, 15, 3, 4) self.assertEqual(traced(x, y), x + y[:, 1:10, ...]) def test_inf_nan(self): class FooMod(torch.nn.Module): def forward(self, x): return x + float('inf'), x + float('-inf'), x + float('nan') fm = FooMod() self.checkGraphModule(fm, (torch.rand(3, 4),)) def test_inf_nan_kwds(self): graph : torch.fx.Graph = torch.fx.Graph() x : torch.fx.Node = graph.create_node('placeholder', 'x') b : torch.fx.Node = graph.create_node('call_function', operator.add, (x, float('inf')), {}, name='inf') c : torch.fx.Node = graph.create_node('call_function', operator.add, (x, float('nan')), {}, name='nan') graph.output((b, c)) gm = torch.fx.GraphModule(torch.nn.Module(), graph) x = torch.rand(3, 4) self.assertEqual(gm(x), (x + float('inf'), x + float('nan'))) def test_deepcopy_recursion_depth(self): depth = sys.getrecursionlimit() + 20 g = torch.fx.Graph() x = g.placeholder('x') for i in range(depth): x = g.call_function(torch.relu, (x,)) g.output(x) copied_graph = copy.deepcopy(g) val_map = {} for orig_node, new_node in zip(g.nodes, copied_graph.nodes): val_map[orig_node] = new_node for orig_node, new_node in zip(g.nodes, copied_graph.nodes): orig_users = set(orig_node.users.keys()) orig_users_equiv = set(val_map[u] for u in orig_users) new_users = set(new_node.users.keys()) self.assertEqual(orig_users_equiv, new_users) @skipIfNoTorchVision def test_replace_uses(self): rn18 = torchvision_models.resnet18() class LowerReluTracer(torch.fx.Tracer): def is_leaf_module(self, m : torch.nn.Module, qualname : str): if isinstance(m, torch.nn.ReLU): return False return super().is_leaf_module(m, qualname) rn18_traced = GraphModule(rn18, LowerReluTracer().trace(rn18)) to_erase = [] for node in rn18_traced.graph.nodes: if node.op == 'call_function' and node.target in [torch.relu, torch.nn.functional.relu]: kwargs = node.kwargs.copy() # Neg doesn't have in-place kwargs.pop('inplace') with rn18_traced.graph.inserting_before(node): new_node = rn18_traced.graph.call_function( the_function=torch.neg, args=node.args, kwargs=node.kwargs) node.replace_all_uses_with(replace_with=new_node) to_erase.append(node) for node in to_erase: rn18_traced.graph.erase_node(node) def test_replace_input(self): graph : torch.fx.Graph = torch.fx.Graph() x : torch.fx.Node = graph.create_node('placeholder', 'x') y : torch.fx.Node = graph.create_node('placeholder', 'y') b : torch.fx.Node = graph.create_node('call_function', target=torch.relu, args=(x,)) output : torch.fx.Node = graph.output(b) b.replace_input_with(x, y) gm = torch.fx.GraphModule(torch.nn.Module(), graph) input_x = torch.randn(33, 44) input_y = torch.randn(11, 22) self.assertEqual(gm(input_x, input_y), torch.relu(input_y)) def test_insertion_point(self): graph : torch.fx.Graph = torch.fx.Graph() x : torch.fx.Node = graph.create_node('placeholder', 'x') b : torch.fx.Node = graph.create_node('call_function', target=torch.relu, args=(x,)) output : torch.fx.Node = graph.output(b) with graph.inserting_before(b): neg : torch.fx.Node = graph.call_function(the_function=torch.neg, args=(x,)) _, *relu_args = b.args b.args = (neg, *relu_args) gm = torch.fx.GraphModule(torch.nn.Module(), graph) input = torch.randn(33, 44) self.assertEqual(gm(input), torch.relu(torch.neg(input))) def test_move_before(self): graph : torch.fx.Graph = torch.fx.Graph() x : torch.fx.Node = graph.create_node('placeholder', 'x') b : torch.fx.Node = graph.create_node('call_function', target=torch.relu, args=(x,)) output : torch.fx.Node = 
graph.output(b) neg : torch.fx.Node = graph.call_function(the_function=torch.neg, args=(x,)) _, *relu_args = b.args b.args = (neg, *relu_args) b.prepend(neg) gm = torch.fx.GraphModule(torch.nn.Module(), graph) input = torch.randn(33, 44) self.assertEqual(gm(input), torch.relu(torch.neg(input))) def test_erase_node_error(self): st = SimpleTest() traced = symbolic_trace(st) for node in traced.graph.nodes: # Test deleting with uses both in another Node and at the output if node.target in [operator.add, torch.relu]: with self.assertRaisesRegex(RuntimeError, 'but it still had .* users in the graph'): traced.graph.erase_node(node) def test_copy_it(self): d = immutable_dict([(3, 4), (5, 6)]) l = immutable_list([(3, 4), (5, 6)]) self.assertEqual(d, deepcopy(d)) self.assertEqual(l, deepcopy(l)) def test_get_torch_func_signature(self): for key in dir(torch): obj = getattr(torch, key) if callable(obj): schemas = get_signature_for_torch_op(obj) def test_find_uses(self): graph = torch.fx.Graph() x = torch.fx.Proxy(graph.placeholder('x')) y = torch.relu(x) z = x + x u = torch.neg(x) graph.output((y + z + u).node) graph.lint() users_of_x = x.node.users self.assertEqual(len(users_of_x), 3) expected_ops = set(['relu', 'add', 'neg']) for use in users_of_x: assert any(use.name.startswith(prefix) for prefix in expected_ops) def test_inline_graph(self): class InlineInto(torch.nn.Module): def forward(self, x): return torch.relu(x) class ToInline(torch.nn.Module): def forward(self, x): return torch.neg(x) inline_into = symbolic_trace(InlineInto()) to_inline = symbolic_trace(ToInline()) combined_graph = torch.fx.Graph() output_node = combined_graph.graph_copy(inline_into.graph, {}) input_node = list(to_inline.graph.nodes)[0] assert input_node and input_node.op == 'placeholder' val_map = {input_node : output_node} output = combined_graph.graph_copy(to_inline.graph, val_map) combined_graph.output(output) combined_module = torch.fx.GraphModule(torch.nn.Module(), combined_graph) input = torch.rand(3, 4) self.assertEqual(combined_module(input), input.relu().neg()) def test_multi_insert_point(self): graph = torch.fx.Graph() x = torch.fx.Proxy(graph.placeholder('x')) relu = torch.relu(x) with graph.inserting_before(relu.node): y = torch.neg(x) z = torch.tanh(y) graph.output((relu.node, z.node)) graph.lint() expected_ops = ['x', 'neg', 'tanh', 'relu'] for node, expected in zip(graph.nodes, expected_ops): assert expected in node.name def test_reassign_args_kwargs_uses(self): graph = torch.fx.Graph() x, y = Proxy(graph.placeholder('x')), Proxy(graph.placeholder('y')) z = x + y zed = z + z + z graph.output(zed.node) graph.lint() # zed = z + z + z -> zed = z + z + x zed.node.args = (zed.node.args[0], x.node) self.assertEqual(x.node.users.keys(), [z.node, zed.node]) # z = x + y -> z = y + y z.node.args = (y.node, y.node) self.assertEqual(x.node.users.keys(), [zed.node]) def test_trace_function(self): def foo(x, y): return torch.relu(x) + y x, y = torch.randn(3, 4), torch.randn(3, 4) self.checkGraphModule(foo, (x, y)) def test_trace_dict_int_keys(self): class ModWithDictArg(torch.nn.Module): def forward(self, d : Dict[int, torch.Tensor]): return d[42] class CallsModWithDict(torch.nn.Module): def __init__(self): super().__init__() self.m = ModWithDictArg() def forward(self, x): return self.m({42: x}) class MyTracer(torch.fx.Tracer): def is_leaf_module(self, m: torch.nn.Module, module_qualified_name : str) -> bool: return isinstance(m, ModWithDictArg) traced_graph = MyTracer().trace(CallsModWithDict()) def 
test_trace_dict_proxy_keys(self): class ModWithDictArg(torch.nn.Module): def forward(self, d : Dict[torch.Tensor, torch.Tensor]): return d[42] class CallsModWithDict(torch.nn.Module): def __init__(self): super().__init__() self.m = ModWithDictArg() def forward(self, x): return self.m({x: x}) class MyTracer(torch.fx.Tracer): def is_leaf_module(self, m: torch.nn.Module, module_qualified_name : str) -> bool: return isinstance(m, ModWithDictArg) with self.assertRaisesRegex(RuntimeError, 'cannot contain a Node'): traced_graph = MyTracer().trace(CallsModWithDict()) def test_direct_param_use(self): class TransposeTest(torch.nn.Module): def __init__(self): super().__init__() self.b = torch.nn.Parameter(torch.rand(4, 3)) def forward(self, x): return self.b class Foo(torch.nn.Module): def __init__(self): super().__init__() self.a = TransposeTest() def forward(self, x): return self.a.b, self.a.b.t(), self.a.b.view(12) traced = torch.fx.symbolic_trace(Foo()) assert(all('constant' not in node.target for node in traced.graph.nodes)) def test_single_default_arg(self): class M(torch.nn.Module): def __init__(self): super().__init__() def forward(self, y=1): return y m = M() self.checkGraphModule(m, ()) self.checkGraphModule(m, (3,)) def test_multiple_default_args(self): class M(torch.nn.Module): def __init__(self): super().__init__() def forward(self, y=1, z=2): return y + z m = M() self.checkGraphModule(m, ()) self.checkGraphModule(m, (3,)) self.checkGraphModule(m, (3, 4)) def test_regular_and_default_args(self): class M(torch.nn.Module): def __init__(self): super().__init__() def forward(self, x, y=1): return x + y m = M() self.checkGraphModule(m, (2,)) self.checkGraphModule(m, (2, 3)) def test_string_literal_return(self): class M(torch.nn.Module): def __init__(self): super().__init__() def forward(self): return "foo" m = M() self.checkGraphModule(m, ()) def test_namedtuple_return_qualname(self): class NamedTupReturn(torch.nn.Module): def forward(self, x): return MyNamedTup(x, x) traced = symbolic_trace(NamedTupReturn()) input = torch.rand(3, 4) self.assertEqual(traced(input), MyNamedTup(input, input)) def test_update_args_kwargs_yells_at_you(self): symtraced = symbolic_trace(SimpleTest()) node = next(iter(symtraced.graph.nodes)) with self.assertRaisesRegex(AttributeError, '__update_args_kwargs'): node.__update_args_kwargs((), {}) def test_torchbind_class_attribute_in_fx(self): if TEST_WITH_ROCM or IS_SANDCASTLE or IS_WINDOWS or IS_MACOS: self.skipTest("torch.classes._TorchScriptTesting._StackString is registered, skipping") class FooBar1234(torch.nn.Module): def __init__(self): super(FooBar1234, self).__init__() self.f = torch.classes._TorchScriptTesting._StackString(["3", "4"]) def forward(self): return self.f.top() m = FooBar1234() self.checkGraphModule(m, ()) def test_torchbind_class_attribute_in_fx_tensor_arg(self): if TEST_WITH_ROCM or IS_SANDCASTLE or IS_WINDOWS or IS_MACOS: self.skipTest("torch.classes._TorchScriptTesting._ReLUClass is registered, skipping") class FooBar2341(torch.nn.Module): def __init__(self): super(FooBar2341, self).__init__() self.f = torch.classes._TorchScriptTesting._ReLUClass() def forward(self, x): return self.f.run(x) m = FooBar2341() traced = symbolic_trace(m) input = torch.randn(3, 4) self.assertEqual(traced(input), m(input)) self.assertTrue(any(n.op == 'call_method' for n in traced.graph.nodes)) def test_script_method_trace(self): class Scripted(torch.nn.Module): def forward(self, x): return torch.relu(x) class Holder(torch.nn.Module): def __init__(self): 
super().__init__() self.s = torch.jit.script(Scripted()) def forward(self, x): return self.s(x) h = Holder() traced = symbolic_trace(h) input = torch.randn(3, 4) self.assertEqual(traced(input), h(input)) self.assertTrue(any(n.op == 'call_method' for n in traced.graph.nodes)) def test_namedtuple_return_trace(self): class NamedTupReturn(torch.nn.Module): def forward(self, x): return Pair(x, x) traced = symbolic_trace(NamedTupReturn()) input = torch.rand(3, 4) self.assertEqual(traced(input), Pair(input, input)) def test_return_type_exists(self): class ReturnTypeModule(torch.nn.Module): def other(self, x: List[str]) -> List[str]: return x def forward(self, x: List[str]) -> List[str]: return self.other(x) traced = symbolic_trace(ReturnTypeModule()) self.assertIn("-> typing_List[str]", traced._code) scripted = torch.jit.script(traced) self.assertIn("-> List[str]", scripted.code) def getitem_inner(self): class GetItemBase(torch.nn.Module): def __init__(self): super().__init__() self.register_buffer('pe', torch.randn(8, 8)) class GetItem1(GetItemBase): def forward(self, x): return self.pe[:, :x.size(0)] class GetItem2(GetItemBase): def forward(self, x): return self.pe[x.size(0)] class GetItem3(GetItemBase): def forward(self, x): return self.pe[4] # fx creates `self._tensor_constant0` here self.checkGraphModule(GetItem1(), [torch.zeros(4)]) self.checkGraphModule(GetItem2(), [torch.zeros(4)]) self.checkGraphModule(GetItem3(), [torch.zeros(4)]) @unittest.skipUnless(os.environ.get("FX_PATCH_GETITEM") == "1", "Will be checked in test_getitem_subproc") def test_getitem(self): self.getitem_inner() def test_getitem_subproc(self): # need to run this test in a subproc to work around: # https://github.com/pytorch/pytorch/issues/50710 proc = Process(target=run_getitem_target) proc.start() proc.join() self.assertEqual(proc.exitcode, 0) def test_user_friendly_call_provenance_with_function(self): def fn(x): return wrapper_fn(x) traced = torch.fx.symbolic_trace(fn) with self.assertRaisesRegex(RuntimeError, "'wrapper_fn' is " "being compiled since it was called" " from 'fn.forward'"): scripted = torch.jit.script(traced) def test_user_friendly_call_provenance_with_module(self): class M(torch.nn.Module): def forward(self, x): return wrapper_fn(x) traced = torch.fx.symbolic_trace(M()) with self.assertRaisesRegex(RuntimeError, "'wrapper_fn' is " "being compiled since it was called" " from 'M.forward'"): scripted = torch.jit.script(traced) def test_snake_case(self): class M(torch.nn.Module): def __init__(self): super(M, self).__init__() self.activations = torch.nn.ModuleDict([ ["snake_case", torch.nn.ReLU()], ["PascalCase", torch.nn.LeakyReLU()], ["ALL_CAPS", torch.nn.PReLU()] ]) def forward(self, x): a = self.activations["snake_case"](x) b = self.activations["PascalCase"](x) c = self.activations["ALL_CAPS"](x) return a, b, c traced = symbolic_trace(M()) check = [ ("activations_snake_case", "activations.snake_case"), ("activations_pascal_case", "activations.PascalCase"), ("activations_all_caps", "activations.ALL_CAPS") ] i = 0 for node in traced.graph.nodes: if node.op == "placeholder" or node.op == "output": continue name = check[i][0] target = check[i][1] self.assertEqual(name, node.name) self.assertEqual(target, node.target) i += 1 self.assertEqual(i, 3) def test_no_mutation(self): from torch.fx.immutable_collections import immutable_list x = immutable_list([3, 4]) with self.assertRaisesRegex(NotImplementedError, "new_args"): x[0] = 4 def test_partial_trace(self): class Foo(torch.nn.Module): def forward(self, x, y): 
if y: return 2 * x else: return x mod = Foo() mod_true = symbolic_trace(mod, concrete_args={'y': True}) mod_false = symbolic_trace(mod, concrete_args={'y': False}) self.assertEqual(mod_true(3, True), 6) print(mod_true.code) assert(any([i.target == torch._assert for i in mod_true.graph.nodes])) with self.assertRaises(AssertionError): mod_true(3, False) self.assertEqual(mod_false(3, False), 3) with self.assertRaises(AssertionError): mod_false(3, True) def f_higher(a, f): return f(a) nf = symbolic_trace(f_higher, concrete_args={'f': lambda x: x * 2}) self.assertEqual(nf(3, lambda x: x * 2), 6) def test_custom_traceback_raised_when_exception_source_is_graphmodule(self): class M(torch.nn.Module): def __init__(self): super(M, self).__init__() self.W = torch.nn.Parameter(torch.randn(5)) def forward(self, x): return torch.dot(self.W, x) traced = torch.fx.symbolic_trace(M()) out = [n for n in traced.graph.nodes if n.op == "output"][-1] with traced.graph.inserting_before(out): relu_out = traced.graph.call_method(method_name='relu', args=(out.args[0],)) out.args = (relu_out,) traced.recompile() with self.capture_stderr() as captured: with self.assertRaises(TypeError): traced(5) self.assertRegex(captured[0], r"Call using an FX-traced Module, line .* of the " r"traced Module's generated forward function:") def test_custom_traceback_not_raised_when_exception_source_is_submodule(self): class M(torch.nn.Module): def __init__(self): super().__init__() self.linear = torch.nn.Linear(3, 4) def forward(self, x): return self.linear(x) traced = torch.fx.symbolic_trace(M()) # Do not change this to `capture_stderr` or another context # manager without ensuring that the output is as expected try: traced(torch.rand(5, 5)) except RuntimeError: captured = traceback.format_exc() self.assertNotRegex(captured, r"Call using an FX-traced Module, line .* of the " r"traced Module's generated forward function:") def test_ast_rewriter_rewrites_assert(self): class M(torch.nn.Module): def forward(self, x: torch.Tensor, y: int, z: int): assert y == z return torch.add(x, x) ast_rewriter = RewritingTracer() graph = ast_rewriter.trace(M()) traced = GraphModule(ast_rewriter.root, graph, "gm") traced.graph.lint() def test_ast_rewriter_rewrites_assert_with_message(self): class M(torch.nn.Module): def forward(self, x: torch.Tensor, y: int, z: int): assert y == z, "msg" return torch.add(x, x) ast_rewriter = RewritingTracer() graph = ast_rewriter.trace(M()) traced = GraphModule(ast_rewriter.root, graph, "gm") traced.graph.lint() def test_ast_rewriter_reassigns_submodules(self): class M(torch.nn.Module): def __init__(self): super().__init__() self.bn = torch.nn.BatchNorm2d(100) def forward(self, x: torch.Tensor): return torch.add(x, x) ast_rewriter = RewritingTracer() graph = ast_rewriter.trace(M()) traced = GraphModule(ast_rewriter.root, graph, "gm") traced.graph.lint() def test_submodule_manipulation_API(self): class C(torch.nn.Module): def __init__(self): super(C, self).__init__() self.conv = torch.nn.Conv2d(16, 33, 3, stride=2) self.param = torch.nn.Parameter(torch.rand(2, 3)) def forward(self, x): return self.conv(torch.cat([self.param, x])) class B(torch.nn.Module): def __init__(self): super(B, self).__init__() self.linear = torch.nn.Linear(100, 200) self.register_buffer("buf", torch.randn(2, 3)) self.net_c = C() def forward(self, x): return self.linear(torch.cat([self.buf, self.net_c(x)])) class A(torch.nn.Module): def __init__(self): super(A, self).__init__() self.net_b = B() self.param = torch.nn.Parameter(torch.rand(2, 3)) def 
forward(self, x): return self.net_b(x) + self.param a = symbolic_trace(A()) a.add_submodule("net_b.net_c.dropout", torch.nn.Dropout(p=0.2)) conv = [n for n in a.graph.nodes if n.target == "net_b.net_c.conv"][-1] with a.graph.inserting_before(conv): dropout = a.graph.call_module(module_name="net_b.net_c.dropout", args=conv.args) conv.replace_all_uses_with(dropout) a.graph.erase_node(conv) a.recompile() def module_exists(gm: GraphModule, path: str) -> bool: return any(path == name for name, _ in gm.named_modules()) def parameter_exists(gm: GraphModule, path: str) -> bool: return (any(path == name for name, _ in gm.named_parameters()) and any(path == name for name in gm.state_dict().keys())) def buffer_exists(gm: GraphModule, path: str) -> bool: return (any(path == name for name, _ in gm.named_buffers()) and any(path == name for name in gm.state_dict().keys())) # Test that we added the "dropout" submodule self.assertTrue(module_exists(a, "net_b.net_c.dropout")) # Test `get_submodule` with an added submodule self.assertIsNotNone(a.get_submodule("net_b.net_c.dropout")) # Test that the "conv" submodule is still there self.assertTrue(module_exists(a, "net_b.net_c.conv")) # Test `get_submodule` with an original module self.assertIsNotNone(a.get_submodule("net_b.net_c.conv")) # Test that the "conv" node is NOT still there conv = [n for n in a.graph.nodes if n.target == "net_b.net_c.conv"] self.assertEqual(conv, []) a.delete_submodule("net_b.net_c.conv") # Test that the "conv" submodule is now gone self.assertFalse(module_exists(a, "net_b.net_c.conv")) # Test `get_submodule` with a deleted submodule with self.assertRaisesRegex(AttributeError, "has no attribute " "`conv`"): self.assertIsNone(a.get_submodule("net_b.net_c.conv")) # Test `get_attr` warnings cat = [n for n in a.graph.nodes if n.target == torch.cat][-1] with a.graph.inserting_before(cat): with warnings.catch_warnings(record=True) as w: param = a.graph.get_attr(qualified_name="net_b.net_c.param") self.assertEqual(len(w), 0) with self.assertWarnsRegex(UserWarning, "Attempted to " "insert a get_attr Node with no " "underlying reference in the " "owning GraphModule"): bad_param = a.graph.get_attr(qualified_name="net_b.param") a.graph.erase_node(bad_param) cat.args = (*cat.args, param) a.recompile() a.graph.lint() # Test `get_parameter` a.get_parameter("net_b.net_c.param") with self.assertRaisesRegex(AttributeError, "is not an " "nn.Parameter"): a.get_parameter("net_b.buf") with self.assertRaisesRegex(AttributeError, "has no attribute " "`param`"): a.get_parameter("net_b.param") # Test `get_buffer` a.get_buffer("net_b.buf") with self.assertRaisesRegex(AttributeError, "is not a " "buffer"): a.get_buffer("net_b.net_c.param") with self.assertRaisesRegex(AttributeError, "has no attribute " "`buf`"): a.get_buffer("net_b.net_c.buf") # Test non-nested attributes a.get_submodule("") a.get_parameter("param") # Insert some unused submodules a.add_submodule("net_b.embedding", torch.nn.Embedding(10, 3)) a.add_submodule("net_b.net_c.embedding", torch.nn.Embedding(10, 3)) a.add_submodule("net_b.net_c.rnn", torch.nn.RNN(10, 20, 2)) a.add_submodule("batch_norm_2d", torch.nn.BatchNorm2d(100)) # Garbage collection a.delete_all_unused_submodules() # Test that all the unused submodules are gone self.assertFalse(module_exists(a, "net_b.embedding")) self.assertFalse(module_exists(a, "net_b.net_c.embedding")) self.assertFalse(module_exists(a, "net_b.net_c.rnn")) self.assertFalse(module_exists(a, "batch_norm_2d")) # Test that we didn't delete any unused Parameters or 
buffers self.assertTrue(parameter_exists(a, "net_b.net_c.param")) self.assertTrue(buffer_exists(a, "net_b.buf")) a.graph.lint() def _test_graph_module_init_buffer_param_copied(self, use_dict_init: bool): class MyModule(torch.nn.Module): def __init__(self): super().__init__() self.register_buffer("my_buff", torch.rand(3, 4)) self.register_parameter( "my_param", torch.nn.Parameter(torch.rand(3, 4)) ) def forward(self, x): return x + self.my_buff + self.my_param mod = MyModule() mod_traced = symbolic_trace(mod) # Create new GraphModule based on original, either w/ dict or root module. orig_buff = mod_traced.get_buffer("my_buff") orig_param = mod_traced.get_parameter("my_param") mod_traced_new = GraphModule( {"my_buff": orig_buff, "my_param": orig_param} if use_dict_init else mod, mod_traced.graph, ) # Check that both my_buff and my_param are found and the same. try: new_buff = mod_traced_new.get_buffer("my_buff") except Exception: self.fail("Did not find my_buff") self.assertEqual(orig_buff, new_buff) try: new_param = mod_traced_new.get_parameter("my_param") except Exception: self.fail("Did not find my_param") self.assertEqual(orig_param, new_param) x = torch.rand(3, 4) orig_out = mod_traced(x) submodules_out = mod_traced_new(x) self.assertEqual(orig_out, submodules_out) def test_graph_module_init_buffer_param_copied_dict_init(self): self._test_graph_module_init_buffer_param_copied(use_dict_init=True) def test_graph_module_init_buffer_param_copied_mod_init(self): self._test_graph_module_init_buffer_param_copied(use_dict_init=False) def test_annotations_with_no_forward_references(self): class A: def __call__(self, x: torch.Tensor): return torch.add(x, x) class M(torch.nn.Module): def forward(self, x: torch.Tensor, a: A) -> torch.Tensor: return a(x) self.checkGraphModule(M(), (torch.rand(2, 3), A()), kwargs=None) def test_annotations_with_forward_references(self): class A: def __call__(self, x: torch.Tensor): return torch.add(x, x) class M(torch.nn.Module): def forward(self, x: 'torch.Tensor', a: 'A') -> 'torch.Tensor': return a(x) self.checkGraphModule(M(), (torch.rand(2, 3), A()), kwargs=None) def test_annotations_with_non_torch_reference_and_no_internal_forward_references(self): class A: def __call__(self, x: torch.Tensor): return torch.add(x, x) class M(torch.nn.Module): def forward(self, x: List[torch.Tensor], a: A) -> torch.Tensor: return a(x[0]) self.checkGraphModule(M(), (torch.rand(2, 3), A()), kwargs=None) def test_annotations_with_non_torch_reference_and_internal_forward_references(self): class A: def __call__(self, x: torch.Tensor): return torch.add(x, x) class M(torch.nn.Module): def forward(self, x: List['torch.Tensor'], a: A) -> 'torch.Tensor': return a(x)[0] self.checkGraphModule(M(), (torch.rand(2, 3), A()), kwargs=None) @unittest.skipIf(sys.version_info < (3, 7), "`__future__` feature " "`annotations` is not defined in Python <3.7") def test_annotation_with_future(self): try: import fx.test_future # noqa: F401 finally: del sys.modules["__future__"] @skipIfNoTorchVision def test_cpatcher(self): cnt = 0 def patched_impl(to_patch, args, kwargs): nonlocal cnt cnt += 1 return to_patch(*args, **kwargs) c_patch_enabled = True def patched_in(to_patch, args, kwargs): nonlocal c_patch_enabled try: c_patch_enabled = False r = patched_impl(to_patch, args, kwargs) finally: c_patch_enabled = True return r def trace_func(frame, action, arg): if action == 'c_call': if c_patch_enabled: torch._C._fx.patch_function(arg, patched_in) import torch rn = torchvision_models.resnet18() try: 
sys.setprofile(trace_func) rn(torch.rand(1, 3, 224, 224)) print("testing print patch") finally: sys.setprofile(None) assert(cnt != 0) def test_randn(self): def f(): return torch.randn(3, 3) fx_f = symbolic_trace(f, enable_cpatching=True) assert(any(i.target == torch.randn for i in fx_f.graph.nodes)) fx_f = symbolic_trace(f, enable_cpatching=False) assert(all(i.target != torch.randn for i in fx_f.graph.nodes)) fx_f = symbolic_trace(f, enable_cpatching=True) assert(any(i.target == torch.randn for i in fx_f.graph.nodes)) def test_pytree(self): def f_sum(x): return sum(x) def f_sum_dict(x): out = 0 for k, v in x.items(): out += v return out def f_dict_list_map(x): new_dict = {} for k, v in x.items(): new_dict[k] = [i + 1 for i in v] return new_dict def f_dict_add(x): return x['a'] + sum(x['z']) pytree._register_pytree_node( Foo, lambda x: ([x.a, x.b], None), lambda x, _: Foo(x[0], x[1]), ) fx_pytree.register_pytree_flatten_spec(Foo, lambda x, _: [x.a, x.b]) def f_custom(x): return x.a + x.b def f_custom_dict(x): return f_sum_dict(x.a) + x.b def f_return_custom(x): return Foo(x.b, x.a) tests = [ (f_sum, [PH, PH, PH]), (f_sum, []), (f_sum_dict, {'a': PH, 'b': PH, 'c': PH}), (f_dict_list_map, {'a': (PH, PH), 'b': [PH], 'c': []}), (f_dict_list_map, {5: (PH, PH, PH)}), (f_dict_add, {'a': PH, 'z': (PH, PH, PH)}), (f_dict_add, {'a': PH, 'z': []}), (f_custom, Foo(PH, PH)), (f_custom, Foo(PH, 3)), (f_custom_dict, Foo({'a': PH, 'b': PH}, PH)), # (f_return_custom, Foo(PH, PH)), # Don't currently support output pytrees ] def verify_pytree(f, inp): val = pytree.tree_map(lambda x: torch.randn(3) if x == PH else x, inp) num_flat_args = len([i == PH for i in pytree.tree_flatten(inp)[0]]) orig_out = f(val) nf = symbolic_trace(f, concrete_args={'x': inp}) self.assertEqual(nf(val), orig_out) assert num_flat_args == 0 or "tree_flatten_spec" in nf.code assert(sum([i.op == 'placeholder' for i in nf.graph.nodes]) == num_flat_args) nf = symbolic_trace(nf) self.assertEqual(nf(val), orig_out) assert "tree_flatten_spec" not in nf.code assert(sum([i.op == 'placeholder' for i in nf.graph.nodes]) == 1) nf = symbolic_trace(nf, concrete_args={'x': inp}) self.assertEqual(nf(val), orig_out) assert num_flat_args == 0 or "tree_flatten_spec" in nf.code assert(sum([i.op == 'placeholder' for i in nf.graph.nodes]) == num_flat_args) pickled = pickle.dumps(nf) nf = pickle.loads(pickled) self.assertEqual(nf(val), orig_out) for f, inp in tests: verify_pytree(f, inp) def test_pytree_concrete(self): def f(b, a): if b: return a['a'] else: return a['z'] inp = {'a': {'a': PH, 'z': PH}, 'b': True} nf = symbolic_trace(f, concrete_args=inp) val = pytree.tree_map(lambda x: torch.randn(3) if x == PH else x, inp) self.assertEqual(nf(**val), f(**val)) nf = symbolic_trace(nf) self.assertEqual(nf(**val), f(**val)) def run_getitem_target(): from torch.fx.symbolic_trace import _wrapped_methods_to_patch _wrapped_methods_to_patch.append((torch.Tensor, "__getitem__")) try: TestFX().getitem_inner() finally: _wrapped_methods_to_patch.pop() class TestOperatorSignatures(JitTestCase): @onlyCPU @ops(op_db, allowed_dtypes=(torch.float,)) def test_get_torch_func_signature_exhaustive(self, device, dtype, op): # Sorted and one entry on each line to minimize merge conflicts. 
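        # get_signature_for_torch_op returns a list of inspect.Signature
        # objects, one per registered overload of the given torch operator.
        # The loop below tries each overload via Signature.bind() until one
        # accepts the sample input. Hedged sketch of the matching idiom
        # (illustrative; mirrors the code below):
        #
        #   schemas = get_signature_for_torch_op(torch.add)
        #   for schema in schemas:
        #       try:
        #           bound = schema.bind(torch.rand(3), torch.rand(3))
        #           bound.apply_defaults()
        #           break
        #       except TypeError:
        #           continue
        #
        # The ops listed next are known to have no usable schema: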
known_no_schema = {'cdist', 'dstack', 'einsum', 'expand', 'expand_as', 'hstack', 'linalg.multi_dot', 'polygamma', 'repeat', 'reshape_as', 'stack', 'view', 'view_as', 'nn.functional.hardshrink', 'vstack', '__getitem__', '__radd__', '__rsub__', '__rmul__', '__rdiv__', '__rpow__'} try: sample_inputs_itr = op.sample_inputs(device, dtype, requires_grad=False) schemas = get_signature_for_torch_op(op.op) if not schemas: raise RuntimeError('No Schemas Returned') for sample_input in sample_inputs_itr: # Iterate through overloads until we hit a match. If we exit this # loop via `else`, we haven't found a match for schema in schemas: try: bound_args = schema.bind(sample_input.input, *sample_input.args, **sample_input.kwargs) bound_args.apply_defaults() op(*bound_args.args, **bound_args.kwargs) break except TypeError as e: pass else: raise RuntimeError(f'Did not match any schemas for op {op.name}!') except Exception as e: assert op.name in known_no_schema or "nn.functional" in op.name class TestFunctionalTracing(JitTestCase): IGNORE_FUNCS = ("has_torch_function", "has_torch_function_unary", "has_torch_function_variadic", "handle_torch_function", "boolean_dispatch") TO_PATCH = {"has_torch_function": None, "has_torch_function_unary": None, "has_torch_function_variadic": None} BUILT_IN_FUNC = (AssertionError, "") PROXY_ITERABLE = (TypeError, r"argument of type 'Proxy' is not iterable") PROXY_ITERATED = (TraceError, r"Proxy object cannot be iterated") LEN_ERROR = (RuntimeError, r"'len' is not supported in symbolic tracing by default") ARG_TYPE_MISMATCH = (TypeError, r", not Proxy$") CONTROL_FLOW = (TraceError, r"symbolically traced variables cannot be used as inputs to control flow") INTERPOLATE_ARGS_CONFLICT = (ValueError, r"only one of size or scale_factor should be defined") UNTRACEABLE_FUNCTIONALS = { "adaptive_avg_pool1d": BUILT_IN_FUNC, "avg_pool1d": BUILT_IN_FUNC, "avg_pool2d": BUILT_IN_FUNC, "avg_pool3d": BUILT_IN_FUNC, "celu_": BUILT_IN_FUNC, "channel_shuffle": BUILT_IN_FUNC, "conv1d": BUILT_IN_FUNC, "conv2d": BUILT_IN_FUNC, "conv3d": BUILT_IN_FUNC, "conv_tbc": BUILT_IN_FUNC, "conv_transpose1d": BUILT_IN_FUNC, "conv_transpose2d": BUILT_IN_FUNC, "conv_transpose3d": BUILT_IN_FUNC, "cosine_similarity": BUILT_IN_FUNC, "elu_": BUILT_IN_FUNC, "hardtanh_": BUILT_IN_FUNC, "leaky_relu_": BUILT_IN_FUNC, "logsigmoid": BUILT_IN_FUNC, "one_hot": BUILT_IN_FUNC, "pdist": BUILT_IN_FUNC, "pixel_shuffle": BUILT_IN_FUNC, "pixel_unshuffle": BUILT_IN_FUNC, "relu_": BUILT_IN_FUNC, "rrelu_": BUILT_IN_FUNC, "selu_": BUILT_IN_FUNC, "softplus": BUILT_IN_FUNC, "softshrink": BUILT_IN_FUNC, "threshold_": BUILT_IN_FUNC, "adaptive_avg_pool2d": LEN_ERROR, "adaptive_avg_pool3d": LEN_ERROR, "adaptive_max_pool2d_with_indices": LEN_ERROR, "adaptive_max_pool3d_with_indices": LEN_ERROR, "instance_norm": CONTROL_FLOW, "pad": LEN_ERROR, "adaptive_max_pool1d": PROXY_ITERABLE, "adaptive_max_pool2d": PROXY_ITERABLE, "adaptive_max_pool3d": PROXY_ITERABLE, "fractional_max_pool2d": PROXY_ITERABLE, "fractional_max_pool3d": PROXY_ITERABLE, "max_pool1d": PROXY_ITERABLE, "max_pool2d": PROXY_ITERABLE, "max_pool3d": PROXY_ITERABLE, "group_norm": PROXY_ITERATED, "lp_pool2d": PROXY_ITERATED, "max_unpool1d": PROXY_ITERATED, "max_unpool2d": PROXY_ITERATED, "max_unpool3d": PROXY_ITERATED, "adaptive_max_pool1d_with_indices": ARG_TYPE_MISMATCH, "fractional_max_pool2d_with_indices": ARG_TYPE_MISMATCH, "fractional_max_pool3d_with_indices": ARG_TYPE_MISMATCH, "hardshrink": ARG_TYPE_MISMATCH, "layer_norm": ARG_TYPE_MISMATCH, "lp_pool1d": ARG_TYPE_MISMATCH, 
"max_pool1d_with_indices": ARG_TYPE_MISMATCH, "max_pool2d_with_indices": ARG_TYPE_MISMATCH, "max_pool3d_with_indices": ARG_TYPE_MISMATCH, "pairwise_distance": ARG_TYPE_MISMATCH, "affine_grid": CONTROL_FLOW, "alpha_dropout": CONTROL_FLOW, "batch_norm": CONTROL_FLOW, "binary_cross_entropy": CONTROL_FLOW, "binary_cross_entropy_with_logits": CONTROL_FLOW, "celu": CONTROL_FLOW, "cosine_embedding_loss": CONTROL_FLOW, "cross_entropy": CONTROL_FLOW, "ctc_loss": CONTROL_FLOW, "dropout": CONTROL_FLOW, "dropout2d": CONTROL_FLOW, "dropout3d": CONTROL_FLOW, "elu": CONTROL_FLOW, "embedding": CONTROL_FLOW, "embedding_bag": CONTROL_FLOW, "feature_alpha_dropout": CONTROL_FLOW, "fold": CONTROL_FLOW, "gaussian_nll_loss": CONTROL_FLOW, "glu": CONTROL_FLOW, "grid_sample": CONTROL_FLOW, "gumbel_softmax": CONTROL_FLOW, "hardsigmoid": CONTROL_FLOW, "hardswish": CONTROL_FLOW, "hardtanh": CONTROL_FLOW, "hinge_embedding_loss": CONTROL_FLOW, "huber_loss": CONTROL_FLOW, "interpolate": CONTROL_FLOW, "kl_div": CONTROL_FLOW, "l1_loss": CONTROL_FLOW, "leaky_relu": CONTROL_FLOW, "local_response_norm": CONTROL_FLOW, "margin_ranking_loss": CONTROL_FLOW, "mse_loss": CONTROL_FLOW, "multi_head_attention_forward": CONTROL_FLOW, "multi_margin_loss": CONTROL_FLOW, "multilabel_margin_loss": CONTROL_FLOW, "multilabel_soft_margin_loss": CONTROL_FLOW, "nll_loss": CONTROL_FLOW, "poisson_nll_loss": CONTROL_FLOW, "relu": CONTROL_FLOW, "relu6": CONTROL_FLOW, "rrelu": CONTROL_FLOW, "selu": CONTROL_FLOW, "silu": CONTROL_FLOW, "mish": CONTROL_FLOW, "smooth_l1_loss": CONTROL_FLOW, "soft_margin_loss": CONTROL_FLOW, "threshold": CONTROL_FLOW, "triplet_margin_loss": CONTROL_FLOW, "triplet_margin_with_distance_loss": CONTROL_FLOW, "unfold": CONTROL_FLOW, "upsample": CONTROL_FLOW, "upsample_bilinear": INTERPOLATE_ARGS_CONFLICT, "upsample_nearest": INTERPOLATE_ARGS_CONFLICT, } # List of nn.functionals with Tensor inputs but not with type annotation FUNCTIONALS_WITHOUT_ANNOTATION = ( "adaptive_max_pool1d", "adaptive_max_pool2d", "adaptive_max_pool3d", "fractional_max_pool2d", "fractional_max_pool3d", "max_pool1d", "max_pool2d", "max_pool3d", "gaussian_nll_loss", "upsample", "upsample_bilinear", "upsample_nearest", ) # Inconsistent behavior between Python 3.8 and other Python versions: # - Python 3.8+: Re-raise internal exception like `PROXY_ITERATED` # - Other Python: Raise `argument of type 'Proxy' is not iterable` due to the same # internal exception above # Use the following map to override the expected exception for Python 3.8 UNTRACEABLE_FUNCTIONALS_PY38 = { "adaptive_max_pool1d": PROXY_ITERATED, "adaptive_max_pool2d": PROXY_ITERATED, "adaptive_max_pool3d": PROXY_ITERATED, "fractional_max_pool2d": PROXY_ITERATED, "fractional_max_pool3d": PROXY_ITERATED, "max_pool1d": PROXY_ITERATED, "max_pool2d": PROXY_ITERATED, "max_pool3d": PROXY_ITERATED, "group_norm": LEN_ERROR } @classmethod def _get_functional(cls): functional_list = [] for f in dir(torch.nn.functional): if not f.islower(): continue # Ignore internal functions if f.startswith('_'): continue # Ignore supporting functions if f in cls.IGNORE_FUNCS: continue fn = getattr(torch.nn.functional, f) # Ignore non-callable object like modules if not isinstance(fn, Callable): continue if f not in cls.FUNCTIONALS_WITHOUT_ANNOTATION: try: sig = inspect.signature(fn) has_tensor_arg = False for arg, param in sig.parameters.items(): if isinstance(param.annotation, type) and issubclass(param.annotation, torch.Tensor): has_tensor_arg = True if not has_tensor_arg: continue # No signature or Object is not 
supported except ValueError: pass functional_list.append((f, fn)) return functional_list @classmethod def generate_test_func(cls, func_name, fn): def functional_test(self): if func_name in self.UNTRACEABLE_FUNCTIONALS_PY38 and \ sys.version_info >= (3, 8) and sys.version_info < (3, 10): exc, err = self.UNTRACEABLE_FUNCTIONALS_PY38[func_name] with self.assertRaisesRegex(exc, err): symbolic_trace(fn) elif func_name in self.UNTRACEABLE_FUNCTIONALS: exc, err = self.UNTRACEABLE_FUNCTIONALS[func_name] with self.assertRaisesRegex(exc, err): symbolic_trace(fn) else: symbolic_trace(fn) return functional_test @classmethod def generate_tests(cls): functional_list = cls._get_functional() for func_name, fn in functional_list: test_name = "test_nn_functional_" + func_name functional_test = cls.generate_test_func(func_name, fn) setattr(cls, test_name, functional_test) @classmethod def setUpClass(cls): def no(*args, **kwargs): return False for name in cls.TO_PATCH.keys(): cls.TO_PATCH[name] = getattr(torch.nn.functional, name) setattr(torch.nn.functional, name, no) @classmethod def tearDownClass(cls): for name in cls.TO_PATCH.keys(): setattr(torch.nn.functional, name, cls.TO_PATCH[name]) TestFunctionalTracing.generate_tests() instantiate_device_type_tests(TestOperatorSignatures, globals()) @skipIfNoTorchVision class TestVisionTracing(JitTestCase): PROXY_ITERATED = (TraceError, r"Proxy object cannot be iterated") INCONSISTENT_TYPE = ( RuntimeError, r"Return value was annotated as having type __torch__.torchvision.models[.\w]+ but is actually of type Tensor" ) UNTRACEABLE_MODELS = { "fasterrcnn_resnet50_fpn": PROXY_ITERATED, "fasterrcnn_mobilenet_v3_large_320_fpn": PROXY_ITERATED, "fasterrcnn_mobilenet_v3_large_fpn": PROXY_ITERATED, "maskrcnn_resnet50_fpn": PROXY_ITERATED, "keypointrcnn_resnet50_fpn": PROXY_ITERATED, "retinanet_resnet50_fpn": PROXY_ITERATED, } UNSCRIPTABLE_MODELS = { "googlenet": INCONSISTENT_TYPE, "inception_v3": INCONSISTENT_TYPE, } output_transform = { "fcn_resnet50": lambda x: x["out"], "fcn_resnet101": lambda x: x["out"], "deeplabv3_resnet50": lambda x: x["out"], "deeplabv3_resnet101": lambda x: x["out"], "deeplabv3_mobilenet_v3_large": lambda x: x["out"], "lraspp_mobilenet_v3_large": lambda x: x["out"], "fasterrcnn_resnet50_fpn": lambda x: x[1], "fasterrcnn_mobilenet_v3_large_fpn": lambda x: x[1], "fasterrcnn_mobilenet_v3_large_320_fpn": lambda x: x[1], "maskrcnn_resnet50_fpn": lambda x: x[1], "keypointrcnn_resnet50_fpn": lambda x: x[1], "retinanet_resnet50_fpn": lambda x: x[1], } @classmethod def generate_test_fn(cls, name, model_fn, x, kwargs): def run_test(self): model = model_fn(**kwargs) model = model.eval() if name in self.UNTRACEABLE_MODELS: err, exc = self.UNTRACEABLE_MODELS[name] with self.assertRaisesRegex(err, exc): graph = symbolic_trace(model) else: out_transform = self.output_transform.get(name, lambda x: x) graph : torch.fx.GraphModule = symbolic_trace(model) a = out_transform(model(x)) b = out_transform(graph(x)) self.assertEqual(a, b) if name in self.UNSCRIPTABLE_MODELS: err, exc = self.UNSCRIPTABLE_MODELS[name] with self.assertRaisesRegex(err, exc): script = torch.jit.script(graph) else: script = torch.jit.script(graph) c = out_transform(script(x)) self.assertEqual(a, c) return run_test @classmethod def generate_classification_tests(cls): for k, v in torchvision_models.__dict__.items(): if callable(v) and k[0].lower() == k[0] and k[0] != "_": test_name = 'test_torchvision_models_' + k x = torch.rand(1, 3, 299, 299) if k in ['inception_v3'] else torch.rand(1, 3, 224, 
224) kwargs = dict(num_classes=50) model_test = cls.generate_test_fn(k, v, x, kwargs) setattr(cls, test_name, model_test) @classmethod def generate_segmentation_tests(cls): for k, v in torchvision_models.segmentation.__dict__.items(): if callable(v) and k[0].lower() == k[0] and k[0] != "_": test_name = 'test_torchvision_models_segmentation_' + k x = torch.rand(1, 3, 32, 32) kwargs = dict(num_classes=10, pretrained_backbone=False) model_test = cls.generate_test_fn(k, v, x, kwargs) setattr(cls, test_name, model_test) @classmethod def generate_detection_tests(cls): for k, v in torchvision_models.detection.__dict__.items(): if callable(v) and k[0].lower() == k[0] and k[0] != "_": test_name = 'test_torchvision_models_detection_' + k x = [torch.rand(3, 300, 300)] kwargs = dict(num_classes=10, pretrained_backbone=False) model_test = cls.generate_test_fn(k, v, x, kwargs) setattr(cls, test_name, model_test) @classmethod def generate_video_tests(cls): for k, v in torchvision_models.video.__dict__.items(): if callable(v) and k[0].lower() == k[0] and k[0] != "_": test_name = 'test_torchvision_models_video_' + k x = torch.rand(1, 3, 4, 112, 112) kwargs = dict(num_classes=50) model_test = cls.generate_test_fn(k, v, x, kwargs) setattr(cls, test_name, model_test) @classmethod def generate_tests(cls): cls.generate_classification_tests() cls.generate_detection_tests() cls.generate_segmentation_tests() cls.generate_video_tests() if HAS_TORCHVISION: TestVisionTracing.generate_tests() if __name__ == '__main__': run_tests()
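# --- Illustrative sketch (not part of the original test file) ---
# All three classes above follow the same pattern: test methods are generated
# at import time and attached with setattr() before unittest collects them.
# A minimal, self-contained version of that pattern, with hypothetical names:
import unittest

class GeneratedTests(unittest.TestCase):
    pass

def _make_test(value):
    # The closure captures `value`, so each generated method checks one case.
    def test(self):
        self.assertEqual(value + value, value * 2)
    return test

for i in range(3):
    # Bind one method per case before the test runner discovers the class.
    setattr(GeneratedTests, "test_double_%d" % i, _make_test(i))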
refactor.py
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Refactoring framework. Used as a main program, this can refactor any number of files and/or recursively descend down directories. Imported as a module, this provides infrastructure to write your own refactoring tool. """ __author__ = "Guido van Rossum <guido@python.org>" # Python imports import os import sys import logging import operator import collections import io from itertools import chain # Local imports from .pgen2 import driver, tokenize, token from .fixer_util import find_root from . import pytree, pygram from . import btm_matcher as bm def get_all_fix_names(fixer_pkg, remove_prefix=True): """Return a sorted list of all available fix names in the given package.""" pkg = __import__(fixer_pkg, [], [], ["*"]) fixer_dir = os.path.dirname(pkg.__file__) fix_names = [] for name in sorted(os.listdir(fixer_dir)): if name.startswith("fix_") and name.endswith(".py"): if remove_prefix: name = name[4:] fix_names.append(name[:-3]) return fix_names class _EveryNode(Exception): pass def _get_head_types(pat): """ Accepts a pytree Pattern Node and returns a set of the pattern types which will match first. """ if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)): # NodePatters must either have no type and no content # or a type and content -- so they don't get any farther # Always return leafs if pat.type is None: raise _EveryNode return {pat.type} if isinstance(pat, pytree.NegatedPattern): if pat.content: return _get_head_types(pat.content) raise _EveryNode # Negated Patterns don't have a type if isinstance(pat, pytree.WildcardPattern): # Recurse on each node in content r = set() for p in pat.content: for x in p: r.update(_get_head_types(x)) return r raise Exception("Oh no! I don't understand pattern %s" %(pat)) def _get_headnode_dict(fixer_list): """ Accepts a list of fixers and returns a dictionary of head node type --> fixer list. """ head_nodes = collections.defaultdict(list) every = [] for fixer in fixer_list: if fixer.pattern: try: heads = _get_head_types(fixer.pattern) except _EveryNode: every.append(fixer) else: for node_type in heads: head_nodes[node_type].append(fixer) else: if fixer._accept_type is not None: head_nodes[fixer._accept_type].append(fixer) else: every.append(fixer) for node_type in chain(pygram.python_grammar.symbol2number.values(), pygram.python_grammar.tokens): head_nodes[node_type].extend(every) return dict(head_nodes) def get_fixers_from_package(pkg_name): """ Return the fully qualified names for fixers in the package pkg_name. """ return [pkg_name + "." + fix_name for fix_name in get_all_fix_names(pkg_name, False)] def _identity(obj): return obj if sys.version_info < (3, 0): import codecs _open_with_encoding = codecs.open # codecs.open doesn't translate newlines sadly. 
def _from_system_newlines(input): return input.replace("\r\n", "\n") def _to_system_newlines(input): if os.linesep != "\n": return input.replace("\n", os.linesep) else: return input else: _open_with_encoding = open _from_system_newlines = _identity _to_system_newlines = _identity def _detect_future_features(source): have_docstring = False gen = tokenize.generate_tokens(io.StringIO(source).readline) def advance(): tok = next(gen) return tok[0], tok[1] ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT}) features = set() try: while True: tp, value = advance() if tp in ignore: continue elif tp == token.STRING: if have_docstring: break have_docstring = True elif tp == token.NAME and value == "from": tp, value = advance() if tp != token.NAME or value != "__future__": break tp, value = advance() if tp != token.NAME or value != "import": break tp, value = advance() if tp == token.OP and value == "(": tp, value = advance() while tp == token.NAME: features.add(value) tp, value = advance() if tp != token.OP or value != ",": break tp, value = advance() else: break except StopIteration: pass return frozenset(features) class FixerError(Exception): """A fixer could not be loaded.""" class RefactoringTool(object): _default_options = {"print_function" : False, "write_unchanged_files" : False} CLASS_PREFIX = "Fix" # The prefix for fixer classes FILE_PREFIX = "fix_" # The prefix for modules with a fixer within def __init__(self, fixer_names, options=None, explicit=None): """Initializer. Args: fixer_names: a list of fixers to import options: a dict with configuration. explicit: a list of fixers to run even if they are explicit. """ self.fixers = fixer_names self.explicit = explicit or [] self.options = self._default_options.copy() if options is not None: self.options.update(options) if self.options["print_function"]: self.grammar = pygram.python_grammar_no_print_statement else: self.grammar = pygram.python_grammar # When this is True, the refactor*() methods will call write_file() for # files processed even if they were not changed during refactoring. If # and only if the refactor method's write parameter was True. self.write_unchanged_files = self.options.get("write_unchanged_files") self.errors = [] self.logger = logging.getLogger("RefactoringTool") self.fixer_log = [] self.wrote = False self.driver = driver.Driver(self.grammar, convert=pytree.convert, logger=self.logger) self.pre_order, self.post_order = self.get_fixers() self.files = [] # List of files that were or should be modified self.BM = bm.BottomMatcher() self.bmi_pre_order = [] # Bottom Matcher incompatible fixers self.bmi_post_order = [] for fixer in chain(self.post_order, self.pre_order): if fixer.BM_compatible: self.BM.add_fixer(fixer) # remove fixers that will be handled by the bottom-up # matcher elif fixer in self.pre_order: self.bmi_pre_order.append(fixer) elif fixer in self.post_order: self.bmi_post_order.append(fixer) self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order) self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order) def get_fixers(self): """Inspects the options to load the requested patterns and handlers. Returns: (pre_order, post_order), where pre_order is the list of fixers that want a pre-order AST traversal, and post_order is the list that want post-order traversal. 
""" pre_order_fixers = [] post_order_fixers = [] for fix_mod_path in self.fixers: mod = __import__(fix_mod_path, {}, {}, ["*"]) fix_name = fix_mod_path.rsplit(".", 1)[-1] if fix_name.startswith(self.FILE_PREFIX): fix_name = fix_name[len(self.FILE_PREFIX):] parts = fix_name.split("_") class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts]) try: fix_class = getattr(mod, class_name) except AttributeError: raise FixerError("Can't find %s.%s" % (fix_name, class_name)) fixer = fix_class(self.options, self.fixer_log) if fixer.explicit and self.explicit is not True and \ fix_mod_path not in self.explicit: self.log_message("Skipping optional fixer: %s", fix_name) continue self.log_debug("Adding transformation: %s", fix_name) if fixer.order == "pre": pre_order_fixers.append(fixer) elif fixer.order == "post": post_order_fixers.append(fixer) else: raise FixerError("Illegal fixer order: %r" % fixer.order) key_func = operator.attrgetter("run_order") pre_order_fixers.sort(key=key_func) post_order_fixers.sort(key=key_func) return (pre_order_fixers, post_order_fixers) def log_error(self, msg, *args, **kwds): """Called when an error occurs.""" raise def log_message(self, msg, *args): """Hook to log a message.""" if args: msg = msg % args self.logger.info(msg) def log_debug(self, msg, *args): if args: msg = msg % args self.logger.debug(msg) def print_output(self, old_text, new_text, filename, equal): """Called with the old version, new version, and filename of a refactored file.""" pass def refactor(self, items, write=False, doctests_only=False): """Refactor a list of files and directories.""" for dir_or_file in items: if os.path.isdir(dir_or_file): self.refactor_dir(dir_or_file, write, doctests_only) else: self.refactor_file(dir_or_file, write, doctests_only) def refactor_dir(self, dir_name, write=False, doctests_only=False): """Descends down a directory and refactor every Python file found. Python files are assumed to have a .py extension. Files and subdirectories starting with '.' are skipped. """ py_ext = os.extsep + "py" for dirpath, dirnames, filenames in os.walk(dir_name): self.log_debug("Descending into %s", dirpath) dirnames.sort() filenames.sort() for name in filenames: if (not name.startswith(".") and os.path.splitext(name)[1] == py_ext): fullname = os.path.join(dirpath, name) self.refactor_file(fullname, write, doctests_only) # Modify dirnames in-place to remove subdirs with leading dots dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")] def _read_python_source(self, filename): """ Do our best to decode a Python source file correctly. """ try: f = open(filename, "rb") except OSError as err: self.log_error("Can't open %s: %s", filename, err) return None, None try: encoding = tokenize.detect_encoding(f.readline)[0] finally: f.close() with _open_with_encoding(filename, "r", encoding=encoding) as f: return _from_system_newlines(f.read()), encoding def refactor_file(self, filename, write=False, doctests_only=False): """Refactors a file.""" input, encoding = self._read_python_source(filename) if input is None: # Reading the file failed. 
return input += "\n" # Silence certain parse errors if doctests_only: self.log_debug("Refactoring doctests in %s", filename) output = self.refactor_docstring(input, filename) if self.write_unchanged_files or output != input: self.processed_file(output, filename, input, write, encoding) else: self.log_debug("No doctest changes in %s", filename) else: tree = self.refactor_string(input, filename) if self.write_unchanged_files or (tree and tree.was_changed): # The [:-1] is to take off the \n we added earlier self.processed_file(str(tree)[:-1], filename, write=write, encoding=encoding) else: self.log_debug("No changes in %s", filename) def refactor_string(self, data, name): """Refactor a given input string. Args: data: a string holding the code to be refactored. name: a human-readable name for use in error/log messages. Returns: An AST corresponding to the refactored input stream; None if there were errors during the parse. """ features = _detect_future_features(data) if "print_function" in features: self.driver.grammar = pygram.python_grammar_no_print_statement try: tree = self.driver.parse_string(data) except Exception as err: self.log_error("Can't parse %s: %s: %s", name, err.__class__.__name__, err) return finally: self.driver.grammar = self.grammar tree.future_features = features self.log_debug("Refactoring %s", name) self.refactor_tree(tree, name) return tree def refactor_stdin(self, doctests_only=False): input = sys.stdin.read() if doctests_only: self.log_debug("Refactoring doctests in stdin") output = self.refactor_docstring(input, "<stdin>") if self.write_unchanged_files or output != input: self.processed_file(output, "<stdin>", input) else: self.log_debug("No doctest changes in stdin") else: tree = self.refactor_string(input, "<stdin>") if self.write_unchanged_files or (tree and tree.was_changed): self.processed_file(str(tree), "<stdin>", input) else: self.log_debug("No changes in stdin") def refactor_tree(self, tree, name): """Refactors a parse tree (modifying the tree in place). For compatible patterns the bottom matcher module is used. Otherwise the tree is traversed node-to-node for matches. Args: tree: a pytree.Node instance representing the root of the tree to be refactored. name: a human-readable name for this tree. Returns: True if the tree was modified, False otherwise. 
""" for fixer in chain(self.pre_order, self.post_order): fixer.start_tree(tree, name) #use traditional matching for the incompatible fixers self.traverse_by(self.bmi_pre_order_heads, tree.pre_order()) self.traverse_by(self.bmi_post_order_heads, tree.post_order()) # obtain a set of candidate nodes match_set = self.BM.run(tree.leaves()) while any(match_set.values()): for fixer in self.BM.fixers: if fixer in match_set and match_set[fixer]: #sort by depth; apply fixers from bottom(of the AST) to top match_set[fixer].sort(key=pytree.Base.depth, reverse=True) if fixer.keep_line_order: #some fixers(eg fix_imports) must be applied #with the original file's line order match_set[fixer].sort(key=pytree.Base.get_lineno) for node in list(match_set[fixer]): if node in match_set[fixer]: match_set[fixer].remove(node) try: find_root(node) except ValueError: # this node has been cut off from a # previous transformation ; skip continue if node.fixers_applied and fixer in node.fixers_applied: # do not apply the same fixer again continue results = fixer.match(node) if results: new = fixer.transform(node, results) if new is not None: node.replace(new) #new.fixers_applied.append(fixer) for node in new.post_order(): # do not apply the fixer again to # this or any subnode if not node.fixers_applied: node.fixers_applied = [] node.fixers_applied.append(fixer) # update the original match set for # the added code new_matches = self.BM.run(new.leaves()) for fxr in new_matches: if not fxr in match_set: match_set[fxr]=[] match_set[fxr].extend(new_matches[fxr]) for fixer in chain(self.pre_order, self.post_order): fixer.finish_tree(tree, name) return tree.was_changed def traverse_by(self, fixers, traversal): """Traverse an AST, applying a set of fixers to each node. This is a helper method for refactor_tree(). Args: fixers: a list of fixer instances. traversal: a generator that yields AST nodes. Returns: None """ if not fixers: return for node in traversal: for fixer in fixers[node.type]: results = fixer.match(node) if results: new = fixer.transform(node, results) if new is not None: node.replace(new) node = new def processed_file(self, new_text, filename, old_text=None, write=False, encoding=None): """ Called when a file has been refactored and there may be changes. """ self.files.append(filename) if old_text is None: old_text = self._read_python_source(filename)[0] if old_text is None: return equal = old_text == new_text self.print_output(old_text, new_text, filename, equal) if equal: self.log_debug("No changes to %s", filename) if not self.write_unchanged_files: return if write: self.write_file(new_text, filename, old_text, encoding) else: self.log_debug("Not writing changes to %s", filename) def write_file(self, new_text, filename, old_text, encoding=None): """Writes a string to a file. It first shows a unified diff between the old text and the new text, and then rewrites the file; the latter is only done if the write option is set. """ try: f = _open_with_encoding(filename, "w", encoding=encoding) except OSError as err: self.log_error("Can't create %s: %s", filename, err) return try: f.write(_to_system_newlines(new_text)) except OSError as err: self.log_error("Can't write %s: %s", filename, err) finally: f.close() self.log_debug("Wrote changes to %s", filename) self.wrote = True PS1 = ">>> " PS2 = "... " def refactor_docstring(self, input, filename): """Refactors a docstring, looking for doctests. This returns a modified version of the input string. 
It looks for doctests, which start with a ">>>" prompt, and may be continued with "..." prompts, as long as the "..." is indented the same as the ">>>". (Unfortunately we can't use the doctest module's parser, since, like most parsers, it is not geared towards preserving the original source.) """ result = [] block = None block_lineno = None indent = None lineno = 0 for line in input.splitlines(keepends=True): lineno += 1 if line.lstrip().startswith(self.PS1): if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) block_lineno = lineno block = [line] i = line.find(self.PS1) indent = line[:i] elif (indent is not None and (line.startswith(indent + self.PS2) or line == indent + self.PS2.rstrip() + "\n")): block.append(line) else: if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) block = None indent = None result.append(line) if block is not None: result.extend(self.refactor_doctest(block, block_lineno, indent, filename)) return "".join(result) def refactor_doctest(self, block, lineno, indent, filename): """Refactors one doctest. A doctest is given as a block of lines, the first of which starts with ">>>" (possibly indented), while the remaining lines start with "..." (identically indented). """ try: tree = self.parse_block(block, lineno, indent) except Exception as err: if self.logger.isEnabledFor(logging.DEBUG): for line in block: self.log_debug("Source: %s", line.rstrip("\n")) self.log_error("Can't parse docstring in %s line %s: %s: %s", filename, lineno, err.__class__.__name__, err) return block if self.refactor_tree(tree, filename): new = str(tree).splitlines(keepends=True) # Undo the adjustment of the line numbers in wrap_toks() below. clipped, new = new[:lineno-1], new[lineno-1:] assert clipped == ["\n"] * (lineno-1), clipped if not new[-1].endswith("\n"): new[-1] += "\n" block = [indent + self.PS1 + new.pop(0)] if new: block += [indent + self.PS2 + line for line in new] return block def summarize(self): if self.wrote: were = "were" else: were = "need to be" if not self.files: self.log_message("No files %s modified.", were) else: self.log_message("Files that %s modified:", were) for file in self.files: self.log_message(file) if self.fixer_log: self.log_message("Warnings/messages while refactoring:") for message in self.fixer_log: self.log_message(message) if self.errors: if len(self.errors) == 1: self.log_message("There was 1 error:") else: self.log_message("There were %d errors:", len(self.errors)) for msg, args, kwds in self.errors: self.log_message(msg, *args, **kwds) def parse_block(self, block, lineno, indent): """Parses a block into a tree. This is necessary to get correct line number / offset information in the parser diagnostics and embedded into the parse tree. """ tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent)) tree.future_features = frozenset() return tree def wrap_toks(self, block, lineno, indent): """Wraps a tokenize stream to systematically modify start/end.""" tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__) for type, value, (line0, col0), (line1, col1), line_text in tokens: line0 += lineno - 1 line1 += lineno - 1 # Don't bother updating the columns; this is too complicated # since line_text would also have to be updated and it would # still break for tokens spanning lines. Let the user guess # that the column numbers for doctests are relative to the # end of the prompt string (PS1 or PS2). 
yield type, value, (line0, col0), (line1, col1), line_text def gen_lines(self, block, indent): """Generates lines as expected by tokenize from a list of lines. This strips the first len(indent + self.PS1) characters off each line. """ prefix1 = indent + self.PS1 prefix2 = indent + self.PS2 prefix = prefix1 for line in block: if line.startswith(prefix): yield line[len(prefix):] elif line == prefix.rstrip() + "\n": yield "\n" else: raise AssertionError("line=%r, prefix=%r" % (line, prefix)) prefix = prefix2 while True: yield "" class MultiprocessingUnsupported(Exception): pass class MultiprocessRefactoringTool(RefactoringTool): def __init__(self, *args, **kwargs): super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs) self.queue = None self.output_lock = None def refactor(self, items, write=False, doctests_only=False, num_processes=1): if num_processes == 1: return super(MultiprocessRefactoringTool, self).refactor( items, write, doctests_only) try: import multiprocessing except ImportError: raise MultiprocessingUnsupported if self.queue is not None: raise RuntimeError("already doing multiple processes") self.queue = multiprocessing.JoinableQueue() self.output_lock = multiprocessing.Lock() processes = [multiprocessing.Process(target=self._child) for i in range(num_processes)] try: for p in processes: p.start() super(MultiprocessRefactoringTool, self).refactor(items, write, doctests_only) finally: self.queue.join() for i in range(num_processes): self.queue.put(None) for p in processes: if p.is_alive(): p.join() self.queue = None def _child(self): task = self.queue.get() while task is not None: args, kwargs = task try: super(MultiprocessRefactoringTool, self).refactor_file( *args, **kwargs) finally: self.queue.task_done() task = self.queue.get() def refactor_file(self, *args, **kwargs): if self.queue is not None: self.queue.put((args, kwargs)) else: return super(MultiprocessRefactoringTool, self).refactor_file( *args, **kwargs)
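# --- Illustrative usage (not part of refactor.py) ---
# Driving the framework above from Python: collect every fixer in the stock
# lib2to3 fixer package and refactor a source string in memory. (lib2to3 is
# deprecated in recent CPython releases, but this API still works there.)
from lib2to3.refactor import RefactoringTool, get_fixers_from_package

tool = RefactoringTool(get_fixers_from_package("lib2to3.fixes"))
tree = tool.refactor_string("print 'hello'\n", "<example>")
print(str(tree))  # -> print('hello')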
data_processing.py
# -*- coding: utf-8 -*- import numpy as np import re import random import json import collections import numpy as np import util.parameters as params from tqdm import tqdm import nltk from nltk.corpus import wordnet as wn import os import pickle import multiprocessing from nltk.tag import StanfordNERTagger from nltk.tag import StanfordPOSTagger FIXED_PARAMETERS, config = params.load_parameters() LABEL_MAP = { "entailment": 0, "neutral": 1, "contradiction": 2, "hidden": -1 } PADDING = "<PAD>" POS_Tagging = [PADDING, 'WP$', 'RBS', 'SYM', 'WRB', 'IN', 'VB', 'POS', 'TO', ':', '-RRB-', '$', 'MD', 'JJ', '#', 'CD', '``', 'JJR', 'NNP', "''", 'LS', 'VBP', 'VBD', 'FW', 'RBR', 'JJS', 'DT', 'VBG', 'RP', 'NNS', 'RB', 'PDT', 'PRP$', '.', 'XX', 'NNPS', 'UH', 'EX', 'NN', 'WDT', 'VBN', 'VBZ', 'CC', ',', '-LRB-', 'PRP', 'WP'] POS_dict = {pos:i for i, pos in enumerate(POS_Tagging)} base_path = os.getcwd() nltk_data_path = base_path + "/../TF/nltk_data" nltk.data.path.append(nltk_data_path) stemmer = nltk.SnowballStemmer('english') tt = nltk.tokenize.treebank.TreebankWordTokenizer() def load_nli_data(path, snli=False, shuffle = True): """ Load MultiNLI or SNLI data. If the "snli" parameter is set to True, a genre label of snli will be assigned to the data. """ data = [] with open(path) as f: for line in tqdm(f): loaded_example = json.loads(line) if loaded_example["gold_label"] not in LABEL_MAP: continue loaded_example["label"] = LABEL_MAP[loaded_example["gold_label"]] if snli: loaded_example["genre"] = "snli" data.append(loaded_example) if shuffle: random.seed(1) random.shuffle(data) return data def load_nli_data_genre(path, genre, snli=True, shuffle = True): """ Load a specific genre's examples from MultiNLI, or load SNLI data and assign a "snli" genre to the examples. If the "snli" parameter is set to True, a genre label of snli will be assigned to the data. If set to true, it will overwrite the genre label for MultiNLI data. 
""" data = [] j = 0 with open(path) as f: for line in f: loaded_example = json.loads(line) if loaded_example["gold_label"] not in LABEL_MAP: continue loaded_example["label"] = LABEL_MAP[loaded_example["gold_label"]] if snli: loaded_example["genre"] = "snli" if loaded_example["genre"] == genre: data.append(loaded_example) if shuffle: random.seed(1) random.shuffle(data) return data def is_exact_match(token1, token2): token1 = token1.lower() token2 = token2.lower() token1_stem = stemmer.stem(token1) if token1 == token2: return True for synsets in wn.synsets(token2): for lemma in synsets.lemma_names(): if token1_stem == stemmer.stem(lemma): return True if token1 == "n't" and token2 == "not": return True elif token1 == "not" and token2 == "n't": return True elif token1_stem == stemmer.stem(token2): return True return False def is_antonyms(token1, token2): token1 = token1.lower() token2 = token2.lower() token1_stem = stemmer.stem(token1) antonym_lists_for_token2 = [] for synsets in wn.synsets(token2): for lemma_synsets in [wn.synsets(l) for l in synsets.lemma_names()]: for lemma_syn in lemma_synsets: for lemma in lemma_syn.lemmas(): for antonym in lemma.antonyms(): antonym_lists_for_token2.append(antonym.name()) # if token1_stem == stemmer.stem(antonym.name()): # return True antonym_lists_for_token2 = list(set(antonym_lists_for_token2)) for atnm in antonym_lists_for_token2: if token1_stem == stemmer.stem(atnm): return True return False def worker(shared_content, dataset): def tokenize(string): string = re.sub(r'\(|\)', '', string) return string.split() for example in tqdm(dataset): s1_tokenize = tokenize(example['sentence1_binary_parse']) s2_tokenize = tokenize(example['sentence2_binary_parse']) s1_token_exact_match = [0] * len(s1_tokenize) s2_token_exact_match = [0] * len(s2_tokenize) s1_token_antonym = [0] * len(s1_tokenize) s2_token_antonym = [0] * len(s2_tokenize) for i, word in enumerate(s1_tokenize): matched = False for j, w2 in enumerate(s2_tokenize): matched = is_exact_match(word, w2) if matched: s1_token_exact_match[i] = 1 s2_token_exact_match[j] = 1 content = {} content['sentence1_token_exact_match_with_s2'] = s1_token_exact_match content['sentence2_token_exact_match_with_s1'] = s2_token_exact_match shared_content[example["pairID"]] = content # print(shared_content[example["pairID"]]) # print(shared_content) def load_shared_content(fh, shared_content): for line in fh: row = line.rstrip().split("\t") key = row[0] value = json.loads(row[1]) shared_content[key] = value def load_mnli_shared_content(): shared_file_exist = False # shared_path = config.datapath + "/shared_2D_EM.json" # shared_path = config.datapath + "/shared_anto.json" # shared_path = config.datapath + "/shared_NER.json" shared_path = config.datapath + "/shared.jsonl" # shared_path = "../shared.json" print(shared_path) if os.path.isfile(shared_path): shared_file_exist = True # shared_content = {} assert shared_file_exist # if not shared_file_exist and config.use_exact_match_feature: # with open(shared_path, 'w') as f: # json.dump(dict(reconvert_shared_content), f) # elif config.use_exact_match_feature: with open(shared_path) as f: shared_content = {} load_shared_content(f, shared_content) # shared_content = json.load(f) return shared_content def sentences_to_padded_index_sequences(datasets): """ Annotate datasets with feature vectors. Adding right-sided padding. 
""" # Extract vocabulary def tokenize(string): string = re.sub(r'\(|\)', '', string) return string.split() word_counter = collections.Counter() char_counter = collections.Counter() # mgr = multiprocessing.Manager() # shared_content = mgr.dict() # process_num = config.num_process_prepro # process_num = 1 for i, dataset in enumerate(datasets): # if not shared_file_exist: # num_per_share = len(dataset) / process_num + 1 # jobs = [ multiprocessing.Process(target=worker, args=(shared_content, dataset[i * num_per_share : (i + 1) * num_per_share] )) for i in range(process_num)] # for j in jobs: # j.start() # for j in jobs: # j.join() for example in tqdm(dataset): s1_tokenize = tokenize(example['sentence1_binary_parse']) s2_tokenize = tokenize(example['sentence2_binary_parse']) word_counter.update(s1_tokenize) word_counter.update(s2_tokenize) for i, word in enumerate(s1_tokenize): char_counter.update([c for c in word]) for word in s2_tokenize: char_counter.update([c for c in word]) # shared_content = {k:v for k, v in shared_content.items()} vocabulary = set([word for word in word_counter]) vocabulary = list(vocabulary) if config.embedding_replacing_rare_word_with_UNK: vocabulary = [PADDING, "<UNK>"] + vocabulary else: vocabulary = [PADDING] + vocabulary # print(char_counter) word_indices = dict(zip(vocabulary, range(len(vocabulary)))) indices_to_words = {v: k for k, v in word_indices.items()} char_vocab = set([char for char in char_counter]) char_vocab = list(char_vocab) char_vocab = [PADDING] + char_vocab char_indices = dict(zip(char_vocab, range(len(char_vocab)))) indices_to_char = {v: k for k, v in char_indices.items()} for i, dataset in enumerate(datasets): for example in tqdm(dataset): for sentence in ['sentence1_binary_parse', 'sentence2_binary_parse']: example[sentence + '_index_sequence'] = np.zeros((FIXED_PARAMETERS["seq_length"]), dtype=np.int32) example[sentence + '_inverse_term_frequency'] = np.zeros((FIXED_PARAMETERS["seq_length"]), dtype=np.float32) token_sequence = tokenize(example[sentence]) padding = FIXED_PARAMETERS["seq_length"] - len(token_sequence) for i in range(FIXED_PARAMETERS["seq_length"]): if i >= len(token_sequence): index = word_indices[PADDING] itf = 0 else: if config.embedding_replacing_rare_word_with_UNK: index = word_indices[token_sequence[i]] if word_counter[token_sequence[i]] >= config.UNK_threshold else word_indices["<UNK>"] else: index = word_indices[token_sequence[i]] itf = 1 / (word_counter[token_sequence[i]] + 1) example[sentence + '_index_sequence'][i] = index example[sentence + '_inverse_term_frequency'][i] = itf example[sentence + '_char_index'] = np.zeros((FIXED_PARAMETERS["seq_length"], config.char_in_word_size), dtype=np.int32) for i in range(FIXED_PARAMETERS["seq_length"]): if i >= len(token_sequence): continue else: chars = [c for c in token_sequence[i]] for j in range(config.char_in_word_size): if j >= (len(chars)): break else: index = char_indices[chars[j]] example[sentence + '_char_index'][i,j] = index return indices_to_words, word_indices, char_indices, indices_to_char def get_subword_list(token): token = token.lower() token = "<" + token + ">" subword_list = [] for i in [3,4,5,6]: for j in range(len(token) - i + 1): subword_list.append(token[j : j + i]) return subword_list def load_subword_list(sentences, rand = False): list_of_vectors = [] for sentence in sentences: sentence_vector = [] for i in range(config.seq_length): if i < len(sentence): idx = range(len(sentence[i])) if rand: random.shuffle(idx) token_subword_feature_list = 
[sentence[i][index] for index in idx][:config.subword_feature_len] if len(token_subword_feature_list) < config.subword_feature_len: token_subword_feature_list += [0] * (config.subword_feature_len - len(token_subword_feature_list)) sentence_vector.append(token_subword_feature_list) else: sentence_vector.append([0] * config.subword_feature_len) list_of_vectors.append(sentence_vector) return np.array(list_of_vectors) def parsing_parse(parse): base_parse = [s.rstrip(" ").rstrip(")") for s in parse.split("(") if ")" in s] pos = [pair.split(" ")[0] for pair in base_parse] return pos def parse_to_pos_vector(parse, left_padding_and_cropping_pair = (0,0)): # ONE HOT pos = parsing_parse(parse) pos_vector = [POS_dict.get(tag,0) for tag in pos] left_padding, left_cropping = left_padding_and_cropping_pair vector = np.zeros((FIXED_PARAMETERS["seq_length"],len(POS_Tagging))) assert left_padding == 0 or left_cropping == 0 for i in range(FIXED_PARAMETERS["seq_length"]): if i < len(pos_vector): vector[i + left_padding, pos_vector[i + left_cropping]] = 1 else: break return vector def generate_pos_feature_tensor(parses, left_padding_and_cropping_pairs): pos_vectors = [] for parse in parses: pos = parsing_parse(parse) pos_vector = [(idx, POS_dict.get(tag, 0)) for idx, tag in enumerate(pos)] pos_vectors.append(pos_vector) return construct_one_hot_feature_tensor(pos_vectors, left_padding_and_cropping_pairs, 2, column_size=len(POS_Tagging)) def generate_quora_pos_feature_tensor(parses, left_padding_and_cropping_pairs): pos_vectors = [] for parse in parses: pos = parse.split() pos_vector = [(idx, POS_dict.get(tag, 0)) for idx, tag in enumerate(pos)] pos_vectors.append(pos_vector) return construct_one_hot_feature_tensor(pos_vectors, left_padding_and_cropping_pairs, 2, column_size=len(POS_Tagging)) def generate_crop_pad_pairs(sequences): seq_len = FIXED_PARAMETERS["seq_length"] list_of_pairs = [] for sequence in sequences: left_padding = 0 left_cropping = 0 if len(sequence) < seq_len: left_padding = int(random.uniform(0,1) * (seq_len - len(sequence))) elif len(sequence) > seq_len: left_cropping = int(random.uniform(0,1) * (len(sequence) - seq_len)) list_of_pairs.append((left_padding, left_cropping)) return list_of_pairs def fill_feature_vector_with_cropping_or_padding(sequences, left_padding_and_cropping_pairs, dim, column_size=None, dtype=np.int32): if dim == 1: list_of_vectors = [] for sequence, pad_crop_pair in zip(sequences, left_padding_and_cropping_pairs): vec = np.zeros((config.seq_length)) left_padding, left_cropping = pad_crop_pair for i in range(config.seq_length): if i + left_padding < config.seq_length and i - left_cropping < len(sequence): vec[i + left_padding] = sequence[i + left_cropping] else: break list_of_vectors.append(vec) return np.array(list_of_vectors, dtype=dtype) elif dim == 2: assert column_size tensor_list = [] for sequence, pad_crop_pair in zip(sequences, left_padding_and_cropping_pairs): left_padding, left_cropping = pad_crop_pair mtrx = np.zeros((config.seq_length, column_size)) for row_idx in range(config.seq_length): if row_idx + left_padding < config.seq_length and row_idx < len(sequence) + left_cropping: for col_idx, content in enumerate(sequence[row_idx + left_cropping]): mtrx[row_idx + left_padding, col_idx] = content else: break tensor_list.append(mtrx) return np.array(tensor_list, dtype=dtype) else: raise NotImplementedError def construct_one_hot_feature_tensor(sequences, left_padding_and_cropping_pairs, dim, column_size=None, dtype=np.int32): """ sequences: [[(idx, val)... 
()]...[]] left_padding_and_cropping_pairs: [[(0,0)...] ... []] """ tensor_list = [] for sequence, pad_crop_pair in zip(sequences, left_padding_and_cropping_pairs): left_padding, left_cropping = pad_crop_pair if dim == 1: vec = np.zeros((config.seq_length)) for num in sequence: if num + left_padding - left_cropping < config.seq_length and num + left_padding - left_cropping >= 0: vec[num + left_padding - left_cropping] = 1 tensor_list.append(vec) elif dim == 2: assert column_size mtrx = np.zeros((config.seq_length, column_size)) for row, col in sequence: if row + left_padding - left_cropping < config.seq_length and row + left_padding - left_cropping >= 0 and col < column_size: mtrx[row + left_padding - left_cropping, col] = 1 tensor_list.append(mtrx) else: raise NotImplementedError return np.array(tensor_list, dtype=dtype) def generate_manual_sample_minibatch(s1_tokenize, s2_tokenize, word_indices, char_indices): nst = StanfordNERTagger('/home/users/yichen.gong/Stanford/stanford-ner-2014-08-27/classifiers/english.muc.7class.distsim.crf.ser.gz', '//home/users/yichen.gong/Stanford/stanford-ner-2014-08-27/stanford-ner.jar',encoding='utf-8') pst = StanfordPOSTagger('/home/users/yichen.gong/Stanford/stanford-postagger-2014-08-27/models/english-bidirectional-distsim.tagger', \ '/home/users/yichen.gong/Stanford/stanford-postagger-2014-08-27/stanford-postagger.jar') premise_vectors = np.zeros((1, config.seq_length)) hypothesis_vectors = np.zeros((1, config.seq_length)) premise_char_vectors = np.zeros((1, config.seq_length, config.char_in_word_size)) hypothesis_char_vectors = np.zeros((1, config.seq_length, config.char_in_word_size)) premise_exact_match = np.zeros((1, config.seq_length)) hypothesis_exact_match = np.zeros((1, config.seq_length)) for idx, w1 in enumerate(s1_tokenize): premise_vectors[0, idx] = word_indices.get(w1, 0) for ci, c in enumerate(w1): premise_char_vectors[0, idx, ci] = char_indices.get(c, 0) for s2idx, w2 in enumerate(s2_tokenize): if is_exact_match(w1, w2): premise_exact_match[0, idx] = 1 hypothesis_exact_match[0, s2idx] = 1 for idx, w2 in enumerate(s2_tokenize): hypothesis_vectors[0, idx] = word_indices.get(w2, 0) for ci, c in enumerate(w2): hypothesis_char_vectors[0, idx, ci] = char_indices.get(c, 0) premise_pos_vectors = np.zeros((1, config.seq_length, len(POS_dict.keys()))) hypothesis_pos_vectors = np.zeros((1, config.seq_length, len(POS_dict.keys()))) s1_pos = pst.tag(s1_tokenize) s2_pos = pst.tag(s2_tokenize) for idx, pair in enumerate(s1_pos): word, tag = pair premise_pos_vectors[0, idx, POS_dict[tag]] = 1 for idx, pair in enumerate(s2_pos): word, tag = pair hypothesis_pos_vectors[0, idx, POS_dict[tag]] = 1 # s1_ner = nst.tag(s1_tokenize) # s2_ner = nst.tag(s2_tokenize) # not used labels = np.zeros((1)) genres = np.zeros((1)) pairIDs = np.zeros((1)) premise_inverse_term_frequency = np.zeros((1, config.seq_length, 1), dtype=np.float32) hypothesis_inverse_term_frequency = np.zeros((1, config.seq_length, 1), dtype=np.float32) premise_antonym_feature = np.zeros((1, config.seq_length)) hypothesis_antonym_feature = np.zeros((1, config.seq_length)) premise_NER_feature = np.zeros((1, config.seq_length, 7)) hypothesis_NER_feature = np.zeros((1, config.seq_length, 7)) premise_exact_match = np.expand_dims(premise_exact_match, 2) hypothesis_exact_match = np.expand_dims(hypothesis_exact_match, 2) premise_antonym_feature = np.expand_dims(premise_antonym_feature, 2) hypothesis_antonym_feature = np.expand_dims(hypothesis_antonym_feature, 2) return premise_vectors, hypothesis_vectors, 
labels, genres, premise_pos_vectors, \ hypothesis_pos_vectors, pairIDs, premise_char_vectors, hypothesis_char_vectors, \ premise_exact_match, hypothesis_exact_match, premise_inverse_term_frequency, hypothesis_inverse_term_frequency, \ premise_antonym_feature, hypothesis_antonym_feature, premise_NER_feature, hypothesis_NER_feature def loadEmbedding_zeros(path, word_indices): """ Load GloVe embeddings. Initializng OOV words to vector of zeros. """ emb = np.zeros((len(word_indices), FIXED_PARAMETERS["word_embedding_dim"]), dtype='float32') with open(path, 'r') as f: for i, line in enumerate(f): if FIXED_PARAMETERS["embeddings_to_load"] != None: if i >= FIXED_PARAMETERS["embeddings_to_load"]: break s = line.split() if s[0] in word_indices: emb[word_indices[s[0]], :] = np.asarray(s[1:]) return emb def loadEmbedding_fully_rand(path, word_indices, divident = 1.0): n = len(word_indices) m = FIXED_PARAMETERS["word_embedding_dim"] emb = np.empty((n, m), dtype=np.float32) emb[:,:] = np.random.normal(size=(n,m)) / divident # Explicitly assign embedding of <PAD> to be zeros. emb[0, :] = np.zeros((1,m), dtype="float32") return emb def loadEmbedding_rand(path, word_indices, divident = 1.0): # TODO double embedding """ Load GloVe embeddings. Doing a random normal initialization for OOV words. """ j = 0 n = len(word_indices) m = FIXED_PARAMETERS["word_embedding_dim"] emb = np.empty((n, m), dtype=np.float32) emb[:,:] = np.random.normal(size=(n,m)) / divident # Explicitly assign embedding of <PAD> to be zeros. emb[0, :] = np.zeros((1,m), dtype="float32") with open(path, 'r', encoding='utf-8') as f: for i, line in enumerate(f): if FIXED_PARAMETERS["embeddings_to_load"] != None: if i >= FIXED_PARAMETERS["embeddings_to_load"]: break s = line.split() if s[0] in word_indices: try: emb[word_indices[s[0]], :] = np.asarray(s[1:]) except ValueError: print(s[0]) continue return emb def all_lemmas(token): t = token.lower() lemmas = [] for synsets in wn.synsets(t): for lemma in synsets.lemma_names(): lemmas.append(lemma) return list(set(lemmas)) def loadEmbedding_with_lemma(path, word_indices): j = 0 n = len(word_indices) m = FIXED_PARAMETERS["word_embedding_dim"] emb = np.empty((n, m), dtype=np.float32) emb[:,:] = np.random.normal(size=(n,m)) # Explicitly assign embedding of <PAD> to be zeros. 
emb[0, :] = np.zeros((1,m), dtype="float32") records = np.zeros((n)) indices_to_words = [""] * n for key, val in word_indices.items(): indices_to_words[val] = key print("OOV words: {}".format(n - np.sum(records) - 1)) print("Loading embedding for first round") with open(path, 'r') as f: for i, line in tqdm(enumerate(f)): if FIXED_PARAMETERS["embeddings_to_load"] != None: if i >= FIXED_PARAMETERS["embeddings_to_load"]: break s = line.split() if s[0] in word_indices: try: emb[word_indices[s[0]], :] = np.asarray(s[1:]) records[word_indices[s[0]]] = 1 except ValueError: print(s[0]) continue print("OOV words: {}".format(n - np.sum(records) - 1)) print("Building OOV lemma sets") OOV_word_indices = {} for i in range(n): if records[i] == 0: for lemma in all_lemmas(indices_to_words[i]): try: OOV_word_indices[lemma].append(i) except: OOV_word_indices[lemma] = [i] print("Loading embedding for second round") with open(path, 'r') as f: for i, line in tqdm(enumerate(f)): if FIXED_PARAMETERS["embeddings_to_load"] != None: if i >= FIXED_PARAMETERS["embeddings_to_load"]: break s = line.split() if s[0] in OOV_word_indices: for idx in OOV_word_indices[s[0]]: if records[idx] == 0: try: emb[idx, :] = np.asarray(s[1:]) records[idx] = 1 except ValueError: print(s[0]) continue print("OOV words: {}".format(n - np.sum(records) - 1)) return emb def save_submission(path, ids, pred_ids): assert(ids.shape[0] == pred_ids.shape[0]) reverse_label_map = {str(value): key for key, value in LABEL_MAP.items()} f = open(path, 'w') f.write("pairID,gold_label\n") for i in range(ids.shape[0]): pred = pred_ids[i] if not config.force_multi_classes else pred_ids[i] / config.forced_num_multi_classes f.write("{},{}\n".format(str(ids[i]), reverse_label_map[str(pred)])) f.close()
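# --- Illustrative sketch (not part of data_processing.py) ---
# Worked example for the subword features above: get_subword_list() enumerates
# fastText-style character n-grams (n = 3..6) over the boundary-marked token.
# A standalone copy of the same logic, for checking the expected output:
def _subwords(token):
    token = "<" + token.lower() + ">"
    return [token[j:j + i] for i in (3, 4, 5, 6) for j in range(len(token) - i + 1)]

assert _subwords("cat") == ['<ca', 'cat', 'at>', '<cat', 'cat>', '<cat>']
# parsing_parse() similarly keeps only the POS tags of a linearized parse:
#   parsing_parse("(ROOT (NP (DT The) (NN cat)))") -> ['DT', 'NN']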
im2rec.py
import os import sys curr_path = os.path.abspath(os.path.dirname(__file__)) sys.path.append(os.path.join(curr_path, "../python")) import mxnet as mx import random import numpy as np import argparse import threading import cv, cv2 import time def list_image(root, recursive, exts): image_list = [] if recursive: cat = {} for path, subdirs, files in os.walk(root): print(len(cat), path) for fname in files: fpath = os.path.join(path, fname) suffix = os.path.splitext(fname)[1].lower() if os.path.isfile(fpath) and (suffix in exts): if path not in cat: cat[path] = len(cat) image_list.append((len(image_list), os.path.relpath(fpath, root), cat[path])) else: for fname in os.listdir(root): fpath = os.path.join(root, fname) suffix = os.path.splitext(fname)[1].lower() if os.path.isfile(fpath) and (suffix in exts): image_list.append((len(image_list), os.path.relpath(fpath, root), 0)) return image_list def write_list(path_out, image_list): with open(path_out, 'w') as fout: for i in xrange(len(image_list)): line = '%d\t'%image_list[i][0] for j in image_list[i][2:]: line += '%f\t'%j line += '%s\n'%image_list[i][1] fout.write(line) def make_list(prefix_out, root, recursive, exts, num_chunks, train_ratio): image_list = list_image(root, recursive, exts) random.shuffle(image_list) N = len(image_list) chunk_size = (N+num_chunks-1)/num_chunks for i in xrange(num_chunks): chunk = image_list[i*chunk_size:(i+1)*chunk_size] if num_chunks > 1: str_chunk = '_%d'%i else: str_chunk = '' if train_ratio < 1: sep = int(chunk_size*train_ratio) write_list(prefix_out+str_chunk+'_train.lst', chunk[:sep]) write_list(prefix_out+str_chunk+'_val.lst', chunk[sep:]) else: write_list(prefix_out+str_chunk+'.lst', chunk) def read_list(path_in): image_list = [] with open(path_in) as fin: for line in fin.readlines(): line = [i.strip() for i in line.strip().split('\t')] item = [int(line[0])] + [line[-1]] + [float(i) for i in line[1:-1]] image_list.append(item) return image_list def write_record(args, image_list): source = image_list tic = [time.time()] color_modes = {-1: cv2.IMREAD_UNCHANGED, 0: cv2.IMREAD_GRAYSCALE, 1: cv2.IMREAD_COLOR} total = len(source) def image_encode(item, q_out): try: img = cv2.imread(os.path.join(args.root, item[1]), color_modes[args.color]) except: print 'imread error:', item[1] return if img == None: print 'read none error:', item[1] return if args.center_crop: if img.shape[0] > img.shape[1]: margin = (img.shape[0] - img.shape[1])/2; img = img[margin:margin+img.shape[1], :] else: margin = (img.shape[1] - img.shape[0])/2; img = img[:, margin:margin+img.shape[0]] if args.resize: if img.shape[0] > img.shape[1]: newsize = (img.shape[0]*args.resize/img.shape[1], args.resize) else: newsize = (args.resize, img.shape[1]*args.resize/img.shape[0]) img = cv2.resize(img, newsize) header = mx.recordio.IRHeader(0, item[2], item[0], 0) try: s = mx.recordio.pack_img(header, img, quality=args.quality, img_fmt=args.encoding) q_out.put(('data', s, item)) except: print 'pack_img error:',item[1] return def read_worker(q_in, q_out): while not q_in.empty(): item = q_in.get() image_encode(item, q_out) def write_worker(q_out, prefix): pre_time = time.time() sink = [] record = mx.recordio.MXRecordIO(prefix+'.rec', 'w') while True: stat, s, item = q_out.get() if stat == 'finish': write_list(prefix+'.lst', sink) break record.write(s) sink.append(item) if len(sink) % 1000 == 0: cur_time = time.time() print 'time:', cur_time - pre_time, ' count:', len(sink) pre_time = cur_time try: import multiprocessing q_in = [multiprocessing.Queue() for i 
in range(args.num_thread)] q_out = multiprocessing.Queue(1024) for i in range(len(image_list)): q_in[i % len(q_in)].put(image_list[i]) read_process = [multiprocessing.Process(target=read_worker, args=(q_in[i], q_out)) \ for i in range(args.num_thread)] for p in read_process: p.start() write_process = multiprocessing.Process(target=write_worker, args=(q_out,args.prefix)) write_process.start() for p in read_process: p.join() q_out.put(('finish', '', [])) write_process.join() except ImportError: print('multiprocessing not available, fall back to single threaded encoding') import Queue q_out = Queue.Queue() record = mx.recordio.MXRecordIO(args.prefix+'.rec', 'w') cnt = 0 pre_time = time.time() for item in image_list: image_encode(item, q_out) if q_out.empty(): continue _, s, _ = q_out.get() record.write(s) cnt += 1 if cnt % 1000 == 0: cur_time = time.time() print 'time:', cur_time - pre_time, ' count:', cnt pre_time = cur_time def main(): parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, description='Make an image record database by reading from\ an image list or creating one') parser.add_argument('prefix', help='prefix of input/output files.') parser.add_argument('root', help='path to folder containing images.') cgroup = parser.add_argument_group('Options for creating image lists') cgroup.add_argument('--list', type=bool, default=False, help='If this is set im2rec will create image list(s) by traversing root folder\ and output to <prefix>.lst.\ Otherwise im2rec will read <prefix>.lst and create a database at <prefix>.rec') cgroup.add_argument('--exts', type=list, default=['.jpeg','.jpg'], help='list of acceptable image extensions.') cgroup.add_argument('--chunks', type=int, default=1, help='number of chunks.') cgroup.add_argument('--train_ratio', type=float, default=1.0, help='Ratio of images to use for training.') cgroup.add_argument('--recursive', type=bool, default=False, help='If true recursively walk through subdirs and assign an unique label\ to images in each folder. Otherwise only include images in the root folder\ and give them label 0.') rgroup = parser.add_argument_group('Options for creating database') rgroup.add_argument('--resize', type=int, default=0, help='resize the shorter edge of image to the newsize, original images will\ be packed by default.') rgroup.add_argument('--center_crop', type=bool, default=False, help='specify whether to crop the center image to make it rectangular.') rgroup.add_argument('--quality', type=int, default=80, help='JPEG quality for encoding, 1-100; or PNG compression for encoding, 1-9') rgroup.add_argument('--num_thread', type=int, default=1, help='number of thread to use for encoding. order of images will be different\ from the input list if >1. the input list will be modified to match the\ resulting order.') rgroup.add_argument('--color', type=int, default=1, choices=[-1, 0, 1], help='specify the color mode of the loaded image.\ 1: Loads a color image. Any transparency of image will be neglected. It is the default flag.\ 0: Loads image in grayscale mode.\ -1:Loads image as such including alpha channel.') rgroup.add_argument('--encoding', type=str, default='.jpg', choices=['.jpg', '.png'], help='specify the encoding of the images.') args = parser.parse_args() if args.list: make_list(args.prefix, args.root, args.recursive, args.exts, args.chunks, args.train_ratio) else: image_list = read_list(args.prefix+'.lst') write_record(args, image_list) if __name__ == '__main__': main()
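# --- Illustrative usage (not part of im2rec.py) ---
# Note that the file above is Python 2 code (print statements, xrange, the old
# cv module). It runs in two passes: first build <prefix>.lst by walking a
# folder tree, then pack the listed images into <prefix>.rec. The paths below
# are examples, not paths from the original project:
#
#   python2 im2rec.py --list True --recursive True data/caltech data/256_ObjectCategories
#   python2 im2rec.py --resize 256 --quality 90 --num_thread 4 data/caltech data/256_ObjectCategories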
future_object_test.py
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import parl
from parl.remote.master import Master
from parl.remote.worker import Worker
import time
import threading
from parl.remote.client import disconnect
from parl.remote import exceptions
from parl.remote.future_mode import FutureObject
from parl.utils import get_free_tcp_port


@parl.remote_class(wait=False)
class Actor(object):
    def __init__(self, arg1=None, arg2=None):
        self.arg1 = arg1
        self.arg2 = arg2

    def get_arg1(self):
        return self.arg1

    def get_arg2(self):
        return self.arg2

    def get_arg1_after_sleep(self, sleep_seconds):
        time.sleep(sleep_seconds)
        return self.arg1


class TestFutureObject(unittest.TestCase):
    def tearDown(self):
        disconnect()

    def test_result_of_get_function(self):
        port = get_free_tcp_port()
        master = Master(port=port)
        th = threading.Thread(target=master.run)
        th.start()
        time.sleep(3)
        worker1 = Worker('localhost:{}'.format(port), 1)
        for _ in range(3):
            if master.cpu_num == 1:
                break
            time.sleep(10)
        self.assertEqual(1, master.cpu_num)

        parl.connect('localhost:{}'.format(port))
        actor = Actor(arg1=10, arg2=20)

        future_obj = actor.get_arg1()
        assert isinstance(future_obj, FutureObject)
        result = future_obj.get()
        assert result == 10

        future_obj = actor.get_arg2()
        assert isinstance(future_obj, FutureObject)
        result = future_obj.get()
        assert result == 20

        master.exit()
        worker1.exit()

    def test_calling_get_function_twice(self):
        port = get_free_tcp_port()
        master = Master(port=port)
        th = threading.Thread(target=master.run)
        th.start()
        time.sleep(3)
        worker1 = Worker('localhost:{}'.format(port), 1)
        for _ in range(3):
            if master.cpu_num == 1:
                break
            time.sleep(10)
        self.assertEqual(1, master.cpu_num)

        parl.connect('localhost:{}'.format(port))
        actor = Actor()

        future_obj = actor.get_arg1()
        result = future_obj.get()
        with self.assertRaises(exceptions.FutureGetRepeatedlyError):
            result = future_obj.get()

        master.exit()
        worker1.exit()

    def test_calling_get_function_with_block_false(self):
        port = get_free_tcp_port()
        master = Master(port=port)
        th = threading.Thread(target=master.run)
        th.start()
        time.sleep(3)
        worker1 = Worker('localhost:{}'.format(port), 1)
        for _ in range(3):
            if master.cpu_num == 1:
                break
            time.sleep(10)
        self.assertEqual(1, master.cpu_num)

        parl.connect('localhost:{}'.format(port))
        actor = Actor(arg1="arg1")

        sleep_seconds = 3
        future_obj = actor.get_arg1_after_sleep(sleep_seconds=sleep_seconds)
        with self.assertRaises(exceptions.FutureObjectEmpty):
            result = future_obj.get(block=False)
        time.sleep(sleep_seconds + 1)
        result = future_obj.get(block=False)
        assert result == "arg1"

        master.exit()
        worker1.exit()

    def test_calling_get_nowait_function(self):
        port = get_free_tcp_port()
        master = Master(port=port)
        th = threading.Thread(target=master.run)
        th.start()
        time.sleep(3)
        worker1 = Worker('localhost:{}'.format(port), 1)
        for _ in range(3):
            if master.cpu_num == 1:
                break
            time.sleep(10)
        self.assertEqual(1, master.cpu_num)

        parl.connect('localhost:{}'.format(port))
        actor = Actor(arg1="arg1")

        sleep_seconds = 3
        future_obj = actor.get_arg1_after_sleep(sleep_seconds=sleep_seconds)
        with self.assertRaises(exceptions.FutureObjectEmpty):
            result = future_obj.get_nowait()
        time.sleep(sleep_seconds + 1)
        result = future_obj.get_nowait()
        assert result == "arg1"

        master.exit()
        worker1.exit()

    def test_calling_get_function_with_timeout(self):
        port = get_free_tcp_port()
        master = Master(port=port)
        th = threading.Thread(target=master.run)
        th.start()
        time.sleep(3)
        worker1 = Worker('localhost:{}'.format(port), 1)
        for _ in range(3):
            if master.cpu_num == 1:
                break
            time.sleep(10)
        self.assertEqual(1, master.cpu_num)

        parl.connect('localhost:{}'.format(port))
        actor = Actor(arg1="arg1")

        sleep_seconds = 3
        future_obj = actor.get_arg1_after_sleep(sleep_seconds=sleep_seconds)
        with self.assertRaises(exceptions.FutureObjectEmpty):
            result = future_obj.get(timeout=1)
        result = future_obj.get(timeout=sleep_seconds + 1)
        assert result == "arg1"

        master.exit()
        worker1.exit()


if __name__ == '__main__':
    unittest.main()
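# --- Illustrative sketch (not part of the test file) ---
# What the tests above exercise, in the smallest possible form: with
# @parl.remote_class(wait=False), every method call on a connected actor
# returns a FutureObject immediately, and .get() blocks for the result.
# Assuming a PARL cluster is already reachable at a hypothetical
# localhost:8010 address:
#
#   import parl
#   parl.connect('localhost:8010')
#   actor = Actor(arg1=1, arg2=2)   # the Actor class defined at the top of this file
#   future = actor.get_arg1()       # returns at once; the work runs remotely
#   print(future.get())             # blocks until the result arrives -> 1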
base.py
import base64 import hashlib import io import json import os import threading import traceback import socket import sys from abc import ABCMeta, abstractmethod from six import text_type from six.moves.http_client import HTTPConnection from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit from ..testrunner import Stop from .actions import actions from .protocol import Protocol, BaseProtocolPart here = os.path.dirname(__file__) def executor_kwargs(test_type, server_config, cache_manager, run_info_data, **kwargs): timeout_multiplier = kwargs["timeout_multiplier"] if timeout_multiplier is None: timeout_multiplier = 1 executor_kwargs = {"server_config": server_config, "timeout_multiplier": timeout_multiplier, "debug_info": kwargs["debug_info"]} if test_type in ("reftest", "print-reftest"): executor_kwargs["screenshot_cache"] = cache_manager.dict() if test_type == "wdspec": executor_kwargs["binary"] = kwargs.get("binary") executor_kwargs["webdriver_binary"] = kwargs.get("webdriver_binary") executor_kwargs["webdriver_args"] = kwargs.get("webdriver_args") # By default the executor may try to cleanup windows after a test (to best # associate any problems with the test causing them). If the user might # want to view the results, however, the executor has to skip that cleanup. if kwargs["pause_after_test"] or kwargs["pause_on_unexpected"]: executor_kwargs["cleanup_after_test"] = False return executor_kwargs def strip_server(url): """Remove the scheme and netloc from a url, leaving only the path and any query or fragment. url - the url to strip e.g. http://example.org:8000/tests?id=1#2 becomes /tests?id=1#2""" url_parts = list(urlsplit(url)) url_parts[0] = "" url_parts[1] = "" return urlunsplit(url_parts) class TestharnessResultConverter(object): harness_codes = {0: "OK", 1: "ERROR", 2: "TIMEOUT", 3: "PRECONDITION_FAILED"} test_codes = {0: "PASS", 1: "FAIL", 2: "TIMEOUT", 3: "NOTRUN", 4: "PRECONDITION_FAILED"} def __call__(self, test, result, extra=None): """Convert a JSON result into a (TestResult, [SubtestResult]) tuple""" result_url, status, message, stack, subtest_results = result assert result_url == test.url, ("Got results from %s, expected %s" % (result_url, test.url)) harness_result = test.result_cls(self.harness_codes[status], message, extra=extra, stack=stack) return (harness_result, [test.subtest_result_cls(st_name, self.test_codes[st_status], st_message, st_stack) for st_name, st_status, st_message, st_stack in subtest_results]) testharness_result_converter = TestharnessResultConverter() def hash_screenshots(screenshots): """Computes the sha1 checksum of a list of base64-encoded screenshots.""" return [hashlib.sha1(base64.b64decode(screenshot)).hexdigest() for screenshot in screenshots] def _ensure_hash_in_reftest_screenshots(extra): """Make sure reftest_screenshots have hashes. Marionette internal reftest runner does not produce hashes. """ log_data = extra.get("reftest_screenshots") if not log_data: return for item in log_data: if type(item) != dict: # Skip relation strings. continue if "hash" not in item: item["hash"] = hash_screenshots([item["screenshot"]])[0] def get_pages(ranges_value, total_pages): """Get a set of page numbers to include in a print reftest. :param ranges_value: Parsed page ranges as a list e.g. [[1,2], [4], [6,None]] :param total_pages: Integer total number of pages in the paginated output. :retval: Set containing integer page numbers to include in the comparison e.g. 
for the example ranges value and 10 total pages this would be {1,2,4,6,7,8,9,10}""" if not ranges_value: return set(range(1, total_pages + 1)) rv = set() for range_limits in ranges_value: if len(range_limits) == 1: range_limits = [range_limits[0], range_limits[0]] if range_limits[0] is None: range_limits[0] = 1 if range_limits[1] is None: range_limits[1] = total_pages if range_limits[0] > total_pages: continue rv |= set(range(range_limits[0], range_limits[1] + 1)) return rv def reftest_result_converter(self, test, result): extra = result.get("extra", {}) _ensure_hash_in_reftest_screenshots(extra) return (test.result_cls( result["status"], result["message"], extra=extra, stack=result.get("stack")), []) def pytest_result_converter(self, test, data): harness_data, subtest_data = data if subtest_data is None: subtest_data = [] harness_result = test.result_cls(*harness_data) subtest_results = [test.subtest_result_cls(*item) for item in subtest_data] return (harness_result, subtest_results) def crashtest_result_converter(self, test, result): return test.result_cls(**result), [] class ExecutorException(Exception): def __init__(self, status, message): self.status = status self.message = message class TimedRunner(object): def __init__(self, logger, func, protocol, url, timeout, extra_timeout): self.func = func self.logger = logger self.result = None self.protocol = protocol self.url = url self.timeout = timeout self.extra_timeout = extra_timeout self.result_flag = threading.Event() def run(self): if self.set_timeout() is Stop: return Stop if self.before_run() is Stop: return Stop executor = threading.Thread(target=self.run_func) executor.start() # Add twice the extra timeout since the called function is expected to # wait at least self.timeout + self.extra_timeout and this gives some leeway timeout = self.timeout + 2 * self.extra_timeout if self.timeout else None finished = self.result_flag.wait(timeout) if self.result is None: if finished: # flag is True unless we timeout; this *shouldn't* happen, but # it can if self.run_func fails to set self.result due to raising self.result = False, ("INTERNAL-ERROR", "%s.run_func didn't set a result" % self.__class__.__name__) else: if self.protocol.is_alive(): message = "Executor hit external timeout (this may indicate a hang)\n" # get a traceback for the current stack of the executor thread message += "".join(traceback.format_stack(sys._current_frames()[executor.ident])) self.result = False, ("EXTERNAL-TIMEOUT", message) else: self.logger.info("Browser not responding, setting status to CRASH") self.result = False, ("CRASH", None) elif self.result[1] is None: # We didn't get any data back from the test, so check if the # browser is still responsive if self.protocol.is_alive(): self.result = False, ("INTERNAL-ERROR", None) else: self.logger.info("Browser not responding, setting status to CRASH") self.result = False, ("CRASH", None) return self.result def set_timeout(self): raise NotImplementedError def before_run(self): pass def run_func(self): raise NotImplementedError class TestExecutor(object): """Abstract Base class for object that actually executes the tests in a specific browser. Typically there will be a different TestExecutor subclass for each test type and method of executing tests. :param browser: ExecutorBrowser instance providing properties of the browser that will be tested. 
:param server_config: Dictionary of wptserve server configuration of the form stored in TestEnvironment.config :param timeout_multiplier: Multiplier relative to base timeout to use when setting test timeout. """ __metaclass__ = ABCMeta test_type = None convert_result = None supports_testdriver = False supports_jsshell = False # Extra timeout to use after internal test timeout at which the harness # should force a timeout extra_timeout = 5 # seconds def __init__(self, logger, browser, server_config, timeout_multiplier=1, debug_info=None, **kwargs): self.logger = logger self.runner = None self.browser = browser self.server_config = server_config self.timeout_multiplier = timeout_multiplier self.debug_info = debug_info self.last_environment = {"protocol": "http", "prefs": {}} self.protocol = None # This must be set in subclasses def setup(self, runner): """Run steps needed before tests can be started e.g. connecting to browser instance :param runner: TestRunner instance that is going to run the tests""" self.runner = runner if self.protocol is not None: self.protocol.setup(runner) def teardown(self): """Run cleanup steps after tests have finished""" if self.protocol is not None: self.protocol.teardown() def reset(self): """Re-initialize internal state to facilitate repeated test execution as implemented by the `--rerun` command-line argument.""" pass def run_test(self, test): """Run a particular test. :param test: The test to run""" if test.environment != self.last_environment: self.on_environment_change(test.environment) try: result = self.do_test(test) except Exception as e: exception_string = traceback.format_exc() self.logger.warning(exception_string) result = self.result_from_exception(test, e, exception_string) if result is Stop: return result # log result of parent test if result[0].status == "ERROR": self.logger.debug(result[0].message) self.last_environment = test.environment self.runner.send_message("test_ended", test, result) def server_url(self, protocol): scheme = "https" if protocol == "h2" else protocol return "%s://%s:%s" % (scheme, self.server_config["browser_host"], self.server_config["ports"][protocol][0]) def test_url(self, test): return urljoin(self.server_url(test.environment["protocol"]), test.url) @abstractmethod def do_test(self, test): """Test-type and protocol specific implementation of running a specific test. 
:param test: The test to run.""" pass def on_environment_change(self, new_environment): pass def result_from_exception(self, test, e, exception_string): if hasattr(e, "status") and e.status in test.result_cls.statuses: status = e.status else: status = "INTERNAL-ERROR" message = text_type(getattr(e, "message", "")) if message: message += "\n" message += exception_string return test.result_cls(status, message), [] def wait(self): self.protocol.base.wait() class TestharnessExecutor(TestExecutor): convert_result = testharness_result_converter class RefTestExecutor(TestExecutor): convert_result = reftest_result_converter is_print = False def __init__(self, logger, browser, server_config, timeout_multiplier=1, screenshot_cache=None, debug_info=None, **kwargs): TestExecutor.__init__(self, logger, browser, server_config, timeout_multiplier=timeout_multiplier, debug_info=debug_info) self.screenshot_cache = screenshot_cache class CrashtestExecutor(TestExecutor): convert_result = crashtest_result_converter class PrintRefTestExecutor(TestExecutor): convert_result = reftest_result_converter is_print = True class RefTestImplementation(object): def __init__(self, executor): self.timeout_multiplier = executor.timeout_multiplier self.executor = executor # Cache of url:(screenshot hash, screenshot). Typically the # screenshot is None, but we set this value if a test fails # and the screenshot was taken from the cache so that we may # retrieve the screenshot from the cache directly in the future self.screenshot_cache = self.executor.screenshot_cache self.message = None def setup(self): pass def teardown(self): pass @property def logger(self): return self.executor.logger def get_hash(self, test, viewport_size, dpi, page_ranges): key = (test.url, viewport_size, dpi) if key not in self.screenshot_cache: success, data = self.get_screenshot_list(test, viewport_size, dpi, page_ranges) if not success: return False, data screenshots = data hash_values = hash_screenshots(data) self.screenshot_cache[key] = (hash_values, screenshots) rv = (hash_values, screenshots) else: rv = self.screenshot_cache[key] self.message.append("%s %s" % (test.url, rv[0])) return True, rv def reset(self): self.screenshot_cache.clear() def check_pass(self, hashes, screenshots, urls, relation, fuzzy): """Check if a test passes, and return a tuple of (pass, page_idx), where page_idx is the zero-based index of the first page on which a difference occurs if any, or None if there are no differences""" assert relation in ("==", "!=") lhs_hashes, rhs_hashes = hashes lhs_screenshots, rhs_screenshots = screenshots if len(lhs_hashes) != len(rhs_hashes): self.logger.info("Got different number of pages") return False assert len(lhs_screenshots) == len(lhs_hashes) == len(rhs_screenshots) == len(rhs_hashes) for (page_idx, (lhs_hash, rhs_hash, lhs_screenshot, rhs_screenshot)) in enumerate(zip(lhs_hashes, rhs_hashes, lhs_screenshots, rhs_screenshots)): comparison_screenshots = (lhs_screenshot, rhs_screenshot) if not fuzzy or fuzzy == ((0, 0), (0, 0)): equal = lhs_hash == rhs_hash # sometimes images can have different hashes, but pixels can be identical. 
if not equal: self.logger.info("Image hashes didn't match%s, checking pixel differences" % ("" if len(hashes) == 1 else " on page %i" % (page_idx + 1))) max_per_channel, pixels_different = self.get_differences(comparison_screenshots, urls) equal = pixels_different == 0 and max_per_channel == 0 else: max_per_channel, pixels_different = self.get_differences(comparison_screenshots, urls, page_idx if len(hashes) > 1 else None) allowed_per_channel, allowed_different = fuzzy self.logger.info("Allowed %s pixels different, maximum difference per channel %s" % ("-".join(str(item) for item in allowed_different), "-".join(str(item) for item in allowed_per_channel))) equal = ((pixels_different == 0 and allowed_different[0] == 0) or (max_per_channel == 0 and allowed_per_channel[0] == 0) or (allowed_per_channel[0] <= max_per_channel <= allowed_per_channel[1] and allowed_different[0] <= pixels_different <= allowed_different[1])) if not equal: return (False if relation == "==" else True, page_idx) # All screenshots were equal within the fuzziness return (True if relation == "==" else False, None) def get_differences(self, screenshots, urls, page_idx=None): from PIL import Image, ImageChops, ImageStat lhs = Image.open(io.BytesIO(base64.b64decode(screenshots[0]))).convert("RGB") rhs = Image.open(io.BytesIO(base64.b64decode(screenshots[1]))).convert("RGB") self.check_if_solid_color(lhs, urls[0]) self.check_if_solid_color(rhs, urls[1]) diff = ImageChops.difference(lhs, rhs) minimal_diff = diff.crop(diff.getbbox()) mask = minimal_diff.convert("L", dither=None) stat = ImageStat.Stat(minimal_diff, mask) per_channel = max(item[1] for item in stat.extrema) count = stat.count[0] self.logger.info("Found %s pixels different, maximum difference per channel %s%s" % (count, per_channel, "" if page_idx is None else " on page %i" % (page_idx + 1))) return per_channel, count def check_if_solid_color(self, image, url): extrema = image.getextrema() if all(min == max for min, max in extrema): color = ''.join('%02X' % value for value, _ in extrema) self.message.append("Screenshot is solid color 0x%s for %s\n" % (color, url)) def run_test(self, test): viewport_size = test.viewport_size dpi = test.dpi page_ranges = test.page_ranges self.message = [] # Depth-first search of reference tree, with the goal # of reachings a leaf node with only pass results stack = list(((test, item[0]), item[1]) for item in reversed(test.references)) page_idx = None while stack: hashes = [None, None] screenshots = [None, None] urls = [None, None] nodes, relation = stack.pop() fuzzy = self.get_fuzzy(test, nodes, relation) for i, node in enumerate(nodes): success, data = self.get_hash(node, viewport_size, dpi, page_ranges) if success is False: return {"status": data[0], "message": data[1]} hashes[i], screenshots[i] = data urls[i] = node.url is_pass, page_idx = self.check_pass(hashes, screenshots, urls, relation, fuzzy) if is_pass: fuzzy = self.get_fuzzy(test, nodes, relation) if nodes[1].references: stack.extend(list(((nodes[1], item[0]), item[1]) for item in reversed(nodes[1].references))) else: # We passed return {"status": "PASS", "message": None} # We failed, so construct a failure message if page_idx is None: # default to outputting the last page page_idx = -1 for i, (node, screenshot) in enumerate(zip(nodes, screenshots)): if screenshot is None: success, screenshot = self.retake_screenshot(node, viewport_size, dpi, page_ranges) if success: screenshots[i] = screenshot log_data = [ {"url": nodes[0].url, "screenshot": screenshots[0][page_idx], 
"hash": hashes[0][page_idx]}, relation, {"url": nodes[1].url, "screenshot": screenshots[1][page_idx], "hash": hashes[1][page_idx]}, ] return {"status": "FAIL", "message": "\n".join(self.message), "extra": {"reftest_screenshots": log_data}} def get_fuzzy(self, root_test, test_nodes, relation): full_key = tuple([item.url for item in test_nodes] + [relation]) ref_only_key = test_nodes[1].url fuzzy_override = root_test.fuzzy_override fuzzy = test_nodes[0].fuzzy sources = [fuzzy_override, fuzzy] keys = [full_key, ref_only_key, None] value = None for source in sources: for key in keys: if key in source: value = source[key] break if value: break return value def retake_screenshot(self, node, viewport_size, dpi, page_ranges): success, data = self.get_screenshot_list(node, viewport_size, dpi, page_ranges) if not success: return False, data key = (node.url, viewport_size, dpi) hash_val, _ = self.screenshot_cache[key] self.screenshot_cache[key] = hash_val, data return True, data def get_screenshot_list(self, node, viewport_size, dpi, page_ranges): success, data = self.executor.screenshot(node, viewport_size, dpi, page_ranges) if success and not isinstance(data, list): return success, [data] return success, data class WdspecExecutor(TestExecutor): convert_result = pytest_result_converter protocol_cls = None def __init__(self, logger, browser, server_config, webdriver_binary, webdriver_args, timeout_multiplier=1, capabilities=None, debug_info=None, **kwargs): self.do_delayed_imports() TestExecutor.__init__(self, logger, browser, server_config, timeout_multiplier=timeout_multiplier, debug_info=debug_info) self.webdriver_binary = webdriver_binary self.webdriver_args = webdriver_args self.timeout_multiplier = timeout_multiplier self.capabilities = capabilities self.protocol = self.protocol_cls(self, browser) def is_alive(self): return self.protocol.is_alive() def on_environment_change(self, new_environment): pass def do_test(self, test): timeout = test.timeout * self.timeout_multiplier + self.extra_timeout success, data = WdspecRun(self.do_wdspec, self.protocol.session_config, test.abs_path, timeout).run() if success: return self.convert_result(test, data) return (test.result_cls(*data), []) def do_wdspec(self, session_config, path, timeout): return pytestrunner.run(path, self.server_config, session_config, timeout=timeout) def do_delayed_imports(self): global pytestrunner from . import pytestrunner class WdspecRun(object): def __init__(self, func, session, path, timeout): self.func = func self.result = (None, None) self.session = session self.path = path self.timeout = timeout self.result_flag = threading.Event() def run(self): """Runs function in a thread and interrupts it if it exceeds the given timeout. Returns (True, (Result, [SubtestResult ...])) in case of success, or (False, (status, extra information)) in the event of failure. 
""" executor = threading.Thread(target=self._run) executor.start() self.result_flag.wait(self.timeout) if self.result[1] is None: self.result = False, ("EXTERNAL-TIMEOUT", None) return self.result def _run(self): try: self.result = True, self.func(self.session, self.path, self.timeout) except (socket.timeout, IOError): self.result = False, ("CRASH", None) except Exception as e: message = getattr(e, "message") if message: message += "\n" message += traceback.format_exc() self.result = False, ("INTERNAL-ERROR", message) finally: self.result_flag.set() class ConnectionlessBaseProtocolPart(BaseProtocolPart): def load(self, url): pass def execute_script(self, script, asynchronous=False): pass def set_timeout(self, timeout): pass def wait(self): pass def set_window(self, handle): pass class ConnectionlessProtocol(Protocol): implements = [ConnectionlessBaseProtocolPart] def connect(self): pass def after_connect(self): pass class WdspecProtocol(Protocol): server_cls = None implements = [ConnectionlessBaseProtocolPart] def __init__(self, executor, browser): Protocol.__init__(self, executor, browser) self.webdriver_binary = executor.webdriver_binary self.webdriver_args = executor.webdriver_args self.capabilities = self.executor.capabilities self.session_config = None self.server = None def connect(self): """Connect to browser via the HTTP server.""" self.server = self.server_cls( self.logger, binary=self.webdriver_binary, args=self.webdriver_args) self.server.start(block=False) self.logger.info( "WebDriver HTTP server listening at %s" % self.server.url) self.session_config = {"host": self.server.host, "port": self.server.port, "capabilities": self.capabilities} def after_connect(self): pass def teardown(self): if self.server is not None and self.server.is_alive(): self.server.stop() def is_alive(self): """Test that the connection is still alive. Because the remote communication happens over HTTP we need to make an explicit request to the remote. It is allowed for WebDriver spec tests to not have a WebDriver session, since this may be what is tested. An HTTP request to an invalid path that results in a 404 is proof enough to us that the server is alive and kicking. """ conn = HTTPConnection(self.server.host, self.server.port) conn.request("HEAD", self.server.base_path + "invalid") res = conn.getresponse() return res.status == 404 class CallbackHandler(object): """Handle callbacks from testdriver-using tests. The default implementation here makes sense for things that are roughly like WebDriver. 
Things that are more different to WebDriver may need to create a fully custom implementation.""" unimplemented_exc = (NotImplementedError,) def __init__(self, logger, protocol, test_window): self.protocol = protocol self.test_window = test_window self.logger = logger self.callbacks = { "action": self.process_action, "complete": self.process_complete } self.actions = {cls.name: cls(self.logger, self.protocol) for cls in actions} def __call__(self, result): url, command, payload = result self.logger.debug("Got async callback: %s" % result[1]) try: callback = self.callbacks[command] except KeyError: raise ValueError("Unknown callback type %r" % result[1]) return callback(url, payload) def process_complete(self, url, payload): rv = [strip_server(url)] + payload return True, rv def process_action(self, url, payload): action = payload["action"] self.logger.debug("Got action: %s" % action) try: action_handler = self.actions[action] except KeyError: raise ValueError("Unknown action %s" % action) try: result = action_handler(payload) except self.unimplemented_exc: self.logger.warning("Action %s not implemented" % action) self._send_message("complete", "error", "Action %s not implemented" % action) except Exception: self.logger.warning("Action %s failed" % action) self.logger.warning(traceback.format_exc()) self._send_message("complete", "error") raise else: self.logger.debug("Action %s completed with result %s" % (action, result)) return_message = {"result": result} self._send_message("complete", "success", json.dumps(return_message)) return False, None def _send_message(self, message_type, status, message=None): self.protocol.testdriver.send_message(message_type, status, message=message)
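# --- Illustration (standalone, not part of the module above) ---
# The pixel comparison RefTestImplementation.get_differences() performs,
# reduced to two in-memory Pillow images: take the channel-wise absolute
# difference, crop it to its bounding box, and read the maximum per-channel
# difference and the count of differing pixels from ImageStat.
from PIL import Image, ImageChops, ImageStat


def pixel_differences(lhs, rhs):
    diff = ImageChops.difference(lhs.convert("RGB"), rhs.convert("RGB"))
    bbox = diff.getbbox()
    if bbox is None:  # the images are pixel-identical
        return 0, 0
    minimal_diff = diff.crop(bbox)
    mask = minimal_diff.convert("L", dither=None)
    stat = ImageStat.Stat(minimal_diff, mask)
    max_per_channel = max(extremum[1] for extremum in stat.extrema)
    pixels_different = stat.count[0]
    return max_per_channel, pixels_different


lhs = Image.new("RGB", (4, 4), (255, 255, 255))
rhs = lhs.copy()
rhs.putpixel((1, 1), (250, 255, 255))
print(pixel_differences(lhs, rhs))  # (5, 1)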
service_table.py
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import threading
import time
import sys

from .redis_store import RedisStore

logging.basicConfig(
    level=logging.DEBUG,
    format="[%(levelname)s %(asctime)s %(filename)s:%(lineno)d] %(message)s")


class ServiceTable(object):
    def __init__(self, ip='127.0.0.1', port=6379, passwd=None,
                 backend='redis'):
        # NOTE: compare strings with ==, not `is`; identity checks on string
        # literals are implementation-dependent and break for computed values.
        if backend == 'redis':
            self._store = RedisStore(ip, port, passwd)
        elif backend == 'etcd':
            # Todo: real etcd backend; fall back to redis for now
            self._store = RedisStore(ip, port, passwd)
        else:
            raise ValueError('unsupported backend: {}'.format(backend))
        self._fd_to_service_name = {}
        # service_name to set(fd)
        self._service_name_to_fds = {}
        # Todo. change to ReadWrite Lock. {service_name: lock}
        # self._service_name_to_fds mutex
        self._mutex = threading.RLock()
        self._service_name_to_update = {}
        # cache from store
        self._service_name_to_servers = {}
        # Todo. Balance assign
        # {fd: set(servers), }
        self._fd_to_servers = {}
        self._server_to_fds = {}
        self._fd_to_version = {}
        self._fd_to_max_num = {}

    def is_servers_update(self, fd, version):
        new_version = self._fd_to_version[fd]
        if new_version > version:
            # is update
            return new_version, list(self._fd_to_servers[fd])
        else:
            return new_version, None

    def get_servers(self, fd, num):
        if fd not in self._fd_to_service_name:
            # not registered
            return []
        service_name = self._fd_to_service_name[fd]
        if service_name not in self._service_name_to_servers or \
                self._service_name_to_update[service_name] is True:
            self._refresh_service(service_name)
        return list(self._fd_to_servers[fd])

    def add_service_name(self, fd, service_name, num):
        self._fd_to_servers[fd] = set()
        self._fd_to_version[fd] = 0
        self._fd_to_max_num[fd] = num
        print('fd={}, service_name={}, max_num={}'.format(fd, service_name,
                                                          num))
        self._fd_to_service_name[fd] = service_name
        with self._mutex:
            if service_name not in self._service_name_to_fds:
                self._service_name_to_fds[service_name] = {fd}
            else:
                self._service_name_to_fds[service_name].add(fd)
            self._service_name_to_update[service_name] = True

    def rm_service_name(self, fd):
        # the client may exit before it has registered
        if fd not in self._fd_to_service_name:
            return
        service_name = self._fd_to_service_name[fd]
        with self._mutex:
            if service_name in self._service_name_to_fds:
                try:
                    self._service_name_to_fds[service_name].remove(fd)
                except KeyError:
                    pass
                finally:
                    if len(self._service_name_to_fds[service_name]) == 0:
                        del self._service_name_to_fds[service_name]
                        del self._service_name_to_update[service_name]
                    else:
                        self._service_name_to_update[service_name] = True
        del self._fd_to_service_name[fd]
        del self._fd_to_max_num[fd]
        del self._fd_to_version[fd]
        for server in self._fd_to_servers[fd]:
            self._server_to_fds[server].remove(fd)
        del self._fd_to_servers[fd]

    def _refresh_service(self, service_name):
        if service_name not in self._service_name_to_servers:
            old_servers = []
        else:
            # list [ip_port0, ip_port1, ..., ]
            old_servers = self._service_name_to_servers[service_name]
        # list [ {'info': info, 'server': ip_port}, ..., ]
        server_infos = self._store.get_service(service_name)
        # list [ip_port, ..., ]. Todo.
info servers = [] for s in server_infos: try: server = s['server'] servers.append(server) except KeyError: # when get service, server may expired continue # update servers in service_name self._service_name_to_servers[service_name] = servers rm_servers = set(old_servers) - set(servers) add_servers = set(servers) - set(old_servers) # no change if len(rm_servers) == 0 and len(add_servers) == 0 and \ self._service_name_to_update[service_name] is False: return self._service_name_to_update[service_name] = False update_fd = set() # remove server for server in rm_servers: if server not in self._server_to_fds: sys.stdout.write('{} not in server_to_fds'.format(server)) continue # remove server in fd for fd in self._server_to_fds[server]: try: self._fd_to_servers[fd].remove(server) # update fd update_fd.add(fd) except KeyError: sys.stdout.write('{} not in fd={} servers'.format(server, fd)) # remove server to fds del self._server_to_fds[server] # _service_name_to_fds maybe removed with self._mutex: if service_name not in self._service_name_to_fds: for fd in update_fd: self._fd_to_version[fd] += 1 return fd_num = len(self._service_name_to_fds[service_name]) # print('fd_num={}'.format(fd_num)) server_num = len(self._service_name_to_servers[service_name]) if server_num == 0: print('service={} server_num=0'.format(service_name)) for fd in update_fd: self._fd_to_version[fd] += 1 return # assume: fd_num=3, server_num=97 # assign: {fd0:32, fd1:32, fd2:32} server_max_connect = int((fd_num + server_num - 1) / server_num) fd_max_connect = max(1, int(server_num / fd_num)) #fd_max_connect = int((server_num + fd_num - 1) / fd_num) print('fd_num={}, server_num={}, smax={}, mcon={}'.format( fd_num, server_num, server_max_connect, fd_max_connect)) # server_conn = [] # [(num, server)] # rebalance # limit connect of server for server in servers: if server not in self._server_to_fds: self._server_to_fds[server] = set() while len(self._server_to_fds[server]) > server_max_connect: fd = self._server_to_fds[server].pop() self._fd_to_servers[fd].remove(server) update_fd.add(fd) print('pop fd={} server={}'.format(fd, server)) try: fds = self._service_name_to_fds[service_name] for fd in fds: max_connect = min(fd_max_connect, self._fd_to_max_num[fd]) logging.info('fd={} max_connect={}'.format(fd, max_connect)) if fd not in self._fd_to_servers: self._fd_to_servers[fd] = set() # limit connect of fd while len(self._fd_to_servers[fd]) > max_connect: server = self._fd_to_servers[fd].pop() self._server_to_fds[server].remove(fd) update_fd.add(fd) logging.info('pop1 fd={} server={}'.format(fd, server)) # fd greed connect with server for fd in fds: max_connect = min(fd_max_connect, self._fd_to_max_num[fd]) for server in servers: if len(self._fd_to_servers[fd]) >= max_connect: break # have server or server connect is max, continue if server in self._fd_to_servers[fd]: continue if len(self._server_to_fds[server]) >= server_max_connect: continue self._fd_to_servers[fd].add(server) self._server_to_fds[server].add(fd) update_fd.add(fd) logging.info('add fd={} server={}'.format(fd, server)) except Exception as e: sys.stderr.write(str(e) + '\n') for fd in update_fd: self._fd_to_version[fd] += 1 def _refresh(self): while True: old_service_names = self._service_name_to_servers.keys() # Todo. ReadWrite Lock. Read Lock. 
            # may be out-of-date, but that doesn't affect correctness;
            # take a snapshot so concurrent register/unregister calls
            # cannot mutate the view while we iterate
            service_names = list(self._service_name_to_fds.keys())
            # refresh & add services
            for service_name in service_names:
                self._refresh_service(service_name)
            # remove service names that are no longer monitored
            rm_service_names = set(old_service_names) - set(service_names)
            for service_name in rm_service_names:
                sys.stderr.write('Remove monitoring service={}\n'.format(
                    service_name))
                try:
                    del self._service_name_to_servers[service_name]
                except KeyError:
                    pass
            time.sleep(2)

    def start(self):
        self._thread = threading.Thread(target=self._refresh)
        self._thread.daemon = True
        self._thread.start()
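# --- Illustration (standalone) ---
# The connection-budget arithmetic ServiceTable._refresh_service() uses when
# rebalancing: with fd_num clients sharing server_num servers, each server
# accepts at most ceil(fd_num / server_num) clients, and each client holds
# at most max(1, server_num // fd_num) servers.
def connection_budget(fd_num, server_num):
    server_max_connect = (fd_num + server_num - 1) // server_num
    fd_max_connect = max(1, server_num // fd_num)
    return server_max_connect, fd_max_connect


# The worked example from the comments above: 3 clients and 97 servers
# yield at most one client per server and 32 servers per client.
assert connection_budget(3, 97) == (1, 32)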
test_all.py
#! /usr/bin/env python3 """Brute-force test script: test libpqxx against many compilers etc. This script makes no changes in the source tree; all builds happen in temporary directories. To make this possible, you may need to run "make distclean" in the source tree. The configure script will refuse to configure otherwise. """ # Without this, pocketlint does not yet understand the print function. from __future__ import print_function from abc import ( ABCMeta, abstractmethod, ) from argparse import ArgumentParser from contextlib import contextmanager from datetime import datetime from functools import partial import json from multiprocessing import ( JoinableQueue, Process, Queue, ) from multiprocessing.pool import ( Pool, ) from os import ( cpu_count, getcwd, ) import os.path from queue import Empty from shutil import rmtree from subprocess import ( CalledProcessError, check_call, check_output, DEVNULL, ) from sys import ( stderr, stdout, ) from tempfile import mkdtemp from textwrap import dedent from traceback import print_exc CPUS = cpu_count() GCC_VERSIONS = list(range(8, 12)) GCC = ['g++-%d' % ver for ver in GCC_VERSIONS] CLANG_VERSIONS = list(range(7, 12)) CLANG = ['clang++-6.0'] + ['clang++-%d' % ver for ver in CLANG_VERSIONS] CXX = GCC + CLANG STDLIB = ( '', '-stdlib=libc++', ) OPT = ('-O0', '-O3') LINK = { 'static': ['--enable-static', '--disable-shared'], 'dynamic': ['--disable-static', '--enable-shared'], } DEBUG = { 'plain': [], 'audit': ['--enable-audit'], 'maintainer': ['--enable-maintainer-mode'], 'full': ['--enable-audit', '--enable-maintainer-mode'], } # CMake "generators." Maps a value for cmake's -G option to a command line to # run. # # I prefer Ninja if available, because it's fast. But hey, the default will # work. # # Maps the name of the generator (as used with cmake's -G option) to the # actual command line needed to do the build. CMAKE_GENERATORS = { 'Ninja': ['ninja'], 'Unix Makefiles': ['make', '-j%d' % CPUS], } class Fail(Exception): """A known, well-handled exception. Doesn't need a traceback.""" class Skip(Exception): """"We're not doing this build. It's not an error though.""" def run(cmd, output, cwd=None): """Run a command, write output to file-like object.""" command_line = ' '.join(cmd) output.write("%s\n\n" % command_line) check_call(cmd, stdout=output, stderr=output, cwd=cwd) def report(output, message): """Report a message to output, and standard output.""" print(message, flush=True) output.write('\n\n') output.write(message) output.write('\n') def file_contains(path, text): """Does the file at path contain text?""" with open(path) as stream: for line in stream: if text in line: return True return False @contextmanager def tmp_dir(): """Create a temporary directory, and clean it up again.""" tmp = mkdtemp() try: yield tmp finally: rmtree(tmp) def write_check_code(work_dir): """Write a simple C++ program so we can tesst whether we can compile it. Returns the file's full path. """ path = os.path.join(work_dir, "check.cxx") with open(path, 'w') as source: source.write(dedent("""\ #include <iostream> int main() { std::cout << "Hello world." 
<< std::endl; } """)) return path def check_compiler(work_dir, cxx, stdlib, check, verbose=False): """Is the given compiler combo available?""" err_file = os.path.join(work_dir, 'stderr.log') if verbose: err_output = open(err_file, 'w') else: err_output = DEVNULL try: command = [cxx, check] if stdlib != '': command.append(stdlib) check_call(command, cwd=work_dir, stderr=err_output) except (OSError, CalledProcessError): if verbose: with open(err_file) as errors: stdout.write(errors.read()) print("Can't build with '%s %s'. Skipping." % (cxx, stdlib)) return False else: return True # TODO: Use Pool. def check_compilers(compilers, stdlibs, verbose=False): """Check which compiler configurations are viable.""" with tmp_dir() as work_dir: check = write_check_code(work_dir) return [ (cxx, stdlib) for stdlib in stdlibs for cxx in compilers if check_compiler( work_dir, cxx, stdlib, check=check, verbose=verbose) ] def find_cmake_command(): """Figure out a CMake generator we can use, or None.""" try: caps = check_output(['cmake', '-E', 'capabilities']) except FileNotFoundError as error: return None names = {generator['name'] for generator in json.loads(caps)['generators']} for gen, cmd in CMAKE_GENERATORS.items(): if gen in names: return gen return None class Config: """Configuration for a build. These classes must be suitable for pickling, so we can send its objects to worker processes. """ __metaclass__ = ABCMeta @abstractmethod def name(self): """Return an identifier for this build configuration.""" def make_log_name(self): """Compose log file name for this build.""" return "build-%s.out" % self.name() class Build: """A pending or ondoing build, in its own directory. Each step returns True for Success, or False for failure. These classes must be suitable for pickling, so we can send its objects to worker processes. """ def __init__(self, logs_dir, config=None): self.config = config self.log = os.path.join(logs_dir, config.make_log_name()) # Start a fresh log file. with open(self.log, 'w') as log: log.write("Starting %s.\n" % datetime.utcnow()) self.work_dir = mkdtemp() def clean_up(self): """Delete the build tree.""" rmtree(self.work_dir) @abstractmethod def configure(self, log): """Prepare for a build.""" @abstractmethod def build(self, log): """Build the code, including the tests. Don't run tests though.""" def test(self, log): """Run tests.""" run( [os.path.join(os.path.curdir, 'test', 'runner')], log, cwd=self.work_dir) def logging(self, function): """Call function, pass open write handle for `self.log`.""" # TODO: Should probably be a decorator. 
with open(self.log, 'a') as log: try: function(log) except Exception as error: log.write("%s\n" % error) raise def do_configure(self): """Call `configure`, writing output to `self.log`.""" self.logging(self.configure) def do_build(self): """Call `build`, writing output to `self.log`.""" self.logging(self.build) def do_test(self): """Call `test`, writing output to `self.log`.""" self.logging(self.test) class AutotoolsConfig(Config): """A combination of build options for the "configure" script.""" def __init__(self, cxx, opt, stdlib, link, link_opts, debug, debug_opts): self.cxx = cxx self.opt = opt self.stdlib = stdlib self.link = link self.link_opts = link_opts self.debug = debug self.debug_opts = debug_opts def name(self): return '_'.join([ self.cxx, self.opt, self.stdlib, self.link, self.debug]) class AutotoolsBuild(Build): """Build using the "configure" script.""" __metaclass__ = ABCMeta def configure(self, log): configure = [ os.path.join(getcwd(), "configure"), "CXX=%s" % self.config.cxx, ] if self.config.stdlib == '': configure += [ "CXXFLAGS=%s" % self.config.opt, ] else: configure += [ "CXXFLAGS=%s %s" % (self.config.opt, self.config.stdlib), "LDFLAGS=%s" % self.config.stdlib, ] configure += [ "--disable-documentation", ] + self.config.link_opts + self.config.debug_opts run(configure, log, cwd=self.work_dir) def build(self, log): run(['make', '-j%d' % CPUS], log, cwd=self.work_dir) # Passing "TESTS=" like this will suppress the actual running of # the tests. We run them in the "test" stage. run(['make', '-j%d' % CPUS, 'check', 'TESTS='], log, cwd=self.work_dir) class CMakeConfig(Config): """Configuration for a CMake build.""" def __init__(self, generator): self.generator = generator self.builder = CMAKE_GENERATORS[generator] def name(self): return "cmake" class CMakeBuild(Build): """Build using CMake. Ignores the config for now. """ __metaclass__ = ABCMeta def configure(self, log): source_dir = getcwd() generator = self.config.generator run( ['cmake', '-G', generator, source_dir], output=log, cwd=self.work_dir) def build(self, log): run(self.config.builder, log, cwd=self.work_dir) def parse_args(): """Parse command-line arguments.""" parser = ArgumentParser(description=__doc__) parser.add_argument('--verbose', '-v', action='store_true') parser.add_argument( '--compilers', '-c', default=','.join(CXX), help="Compilers, separated by commas. Default is %(default)s.") parser.add_argument( '--optimize', '-O', default=','.join(OPT), help=( "Alternative optimisation options, separated by commas. " "Default is %(default)s.")) parser.add_argument( '--stdlibs', '-L', default=','.join(STDLIB), help=( "Comma-separated options for choosing standard library. " "Defaults to %(default)s.")) parser.add_argument( '--logs', '-l', default='.', metavar='DIRECTORY', help="Write build logs to DIRECTORY.") parser.add_argument( '--jobs', '-j', default=CPUS, metavar='CPUS', help=( "When running 'make', run up to CPUS concurrent processes. " "Defaults to %(default)s.")) parser.add_argument( '--minimal', '-m', action='store_true', help="Make it as short a run as possible. For testing this script.") return parser.parse_args() def soft_get(queue, block=True): """Get an item off `queue`, or `None` if the queue is empty.""" try: return queue.get(block) except Empty: return None def read_queue(queue, block=True): """Read entries off `queue`, terminating when it gets a `None`. Also terminates when the queue is empty. 
""" entry = soft_get(queue, block) while entry is not None: yield entry entry = soft_get(queue, block) def service_builds(in_queue, fail_queue, out_queue): """Worker process for "build" stage: process one job at a time. Sends successful builds to `out_queue`, and failed builds to `fail_queue`. Terminates when it receives a `None`, at which point it will send a `None` into `out_queue` in turn. """ for build in read_queue(in_queue): try: build.do_build() except Exception as error: fail_queue.put((build, "%s" % error)) else: out_queue.put(build) in_queue.task_done() # Mark the end of the queue. out_queue.put(None) def service_tests(in_queue, fail_queue, out_queue): """Worker process for "test" stage: test one build at a time. Sends successful builds to `out_queue`, and failed builds to `fail_queue`. Terminates when it receives a final `None`. Does not send out a final `None` of its own. """ for build in read_queue(in_queue): try: build.do_test() except Exception as error: fail_queue.put((build, "%s" % error)) else: out_queue.put(build) in_queue.task_done() def report_failures(queue, message): """Report failures from a failure queue. Return total number.""" failures = 0 for build, error in read_queue(queue, block=False): print("%s: %s - %s" % (message, build.config.name(), error)) failures += 1 return failures def count_entries(queue): """Get and discard all entries from `queue`, return the total count.""" total = 0 for _ in read_queue(queue, block=False): total += 1 return total def gather_builds(args): """Produce the list of builds we want to perform.""" if args.verbose: print("\nChecking available compilers.") compiler_candidates = args.compilers.split(',') compilers = check_compilers( compiler_candidates, args.stdlibs.split(','), verbose=args.verbose) if list(compilers) == []: raise Fail( "Did not find any viable compilers. Tried: %s." % ', '.join(compiler_candidates)) opt_levels = args.optimize.split(',') link_types = LINK.items() debug_mixes = DEBUG.items() if args.minimal: compilers = compilers[:1] opt_levels = opt_levels[:1] link_types = list(link_types)[:1] debug_mixes = list(debug_mixes)[:1] builds = [ AutotoolsBuild( args.logs, AutotoolsConfig( opt=opt, link=link, link_opts=link_opts, debug=debug, debug_opts=debug_opts, cxx=cxx, stdlib=stdlib)) for opt in sorted(opt_levels) for link, link_opts in sorted(link_types) for debug, debug_opts in sorted(debug_mixes) for cxx, stdlib in compilers ] cmake = find_cmake_command() if cmake is not None: builds.append(CMakeBuild(args.logs, CMakeConfig(cmake))) return builds def enqueue(queue, build, *args): """Put `build` on `queue`. Ignores additional arguments, so that it can be used as a clalback for `Pool`. We do this instead of a lambda in order to get the closure right. We want the build for the current iteration, not the last one that was executed before the lambda runs. """ queue.put(build) def enqueue_error(queue, build, error): """Put the pair of `build` and `error` on `queue`.""" queue.put((build, error)) def main(args): """Do it all.""" if not os.path.isdir(args.logs): raise Fail("Logs location '%s' is not a directory." % args.logs) builds = gather_builds(args) if args.verbose: print("Lined up %d builds." % len(builds)) # The "configure" step is single-threaded. We can run many at the same # time, even when we're also running a "build" step at the same time. # This means we may run a lot more processes than we have CPUs, but there's # no law against that. There's also I/O time to be covered. 
configure_pool = Pool() # Builds which have failed the "configure" stage, with their errors. This # queue must never stall, so that we can let results pile up here while the # work continues. configure_fails = Queue(len(builds)) # Waiting list for the "build" stage. It contains Build objects, # terminated by a final None to signify that there are no more builds to be # done. build_queue = JoinableQueue(10) # Builds that have failed the "build" stage. build_fails = Queue(len(builds)) # Waiting list for the "test" stage. It contains Build objects, terminated # by a final None. test_queue = JoinableQueue(10) # The "build" step tries to utilise all CPUs, and it may use a fair bit of # memory. Run only one of these at a time, in a single worker process. build_worker = Process( target=service_builds, args=(build_queue, build_fails, test_queue)) build_worker.start() # Builds that have failed the "test" stage. test_fails = Queue(len(builds)) # Completed builds. This must never stall. done_queue = JoinableQueue(len(builds)) # The "test" step can not run concurrently (yet). So, run tests serially # in a single worker process. It takes its jobs directly from the "build" # worker. test_worker = Process( target=service_tests, args=(test_queue, test_fails, done_queue)) test_worker.start() # Feed all builds into the "configure" pool. Each build which passes this # stage goes into the "build" queue. for build in builds: configure_pool.apply_async( build.do_configure, callback=partial(enqueue, build_queue, build), error_callback=partial(enqueue_error, configure_fails, build)) if args.verbose: print("All jobs are underway.") configure_pool.close() configure_pool.join() # TODO: Async reporting for faster feedback. configure_fail_count = report_failures(configure_fails, "CONFIGURE FAIL") if args.verbose: print("Configure stage done.") # Mark the end of the build queue for the build worker. build_queue.put(None) build_worker.join() # TODO: Async reporting for faster feedback. build_fail_count = report_failures(build_fails, "BUILD FAIL") if args.verbose: print("Build step done.") # Mark the end of the test queue for the test worker. test_queue.put(None) test_worker.join() # TODO: Async reporting for faster feedback. # TODO: Collate failures into meaningful output, e.g. "shared library fails." test_fail_count = report_failures(test_fails, "TEST FAIL") if args.verbose: print("Test step done.") # All done. Clean up. for build in builds: build.clean_up() ok_count = count_entries(done_queue) if ok_count == len(builds): print("All tests OK.") else: print( "Failures during configure: %d - build: %d - test: %d. OK: %d." % ( configure_fail_count, build_fail_count, test_fail_count, ok_count, )) if __name__ == '__main__': try: exit(main(parse_args())) except Fail as failure: stderr.write("%s\n" % failure) exit(2)
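# --- Illustration (standalone) ---
# The sentinel-terminated worker pattern that service_builds() and
# service_tests() above rely on: pull jobs from a JoinableQueue until a
# final None arrives, routing successes and failures to separate queues.
# All names here are local to this sketch.
from multiprocessing import JoinableQueue, Process, Queue


def worker(in_queue, fail_queue, out_queue):
    while True:
        job = in_queue.get()
        if job is None:  # sentinel: no more jobs
            in_queue.task_done()
            break
        try:
            out_queue.put(job * 2)
        except Exception as error:
            fail_queue.put((job, str(error)))
        in_queue.task_done()


if __name__ == '__main__':
    jobs, fails, done = JoinableQueue(), Queue(), Queue()
    w = Process(target=worker, args=(jobs, fails, done))
    w.start()
    for n in range(3):
        jobs.put(n)
    jobs.put(None)  # mark the end of the queue
    w.join()
    print(sorted(done.get() for _ in range(3)))  # [0, 2, 4]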
graphical_interface.py
#!/usr/bin/env python
import rospy
import math
import time
import sys, select, termios, tty
import os
from std_msgs.msg import Empty
import geometry_msgs.msg
from geometry_msgs.msg import Twist
from geometry_msgs.msg import TwistStamped

x = 0
y = 0
z = 0

from Tkinter import *
import ttk
import threading
import ars_control
import subprocess

global window
window = Tk()
window.config(background="#41B77F")

prompt = 'Click any button, or press a key'
L = Label(window, text=prompt, width=len(prompt))
L.pack()

cmd = """
#!/bin/bash
echo "Stopping Robot":
rostopic pub -1 /robot_cmd_stamped geometry_msgs/TwistStamped "
header:
  seq: 0
  stamp:
    secs: 0
    nsecs: 0
  frame_id: ''
twist:
  linear:
    x: 0.0
    y: 0.0
    z: 0.0
  angular:
    x: 0.0
    y: 0.0
    z: 0.0"
"""


def key(event):
    if event.char == event.keysym:
        msg = 'Normal Key %r' % event.char
    elif len(event.char) == 1:
        msg = 'Punctuation Key %r (%r)' % (event.keysym, event.char)
    else:
        msg = 'Special Key %r' % event.keysym
    L.config(text=msg)


L.bind_all('<Key>', key)


def do_mouse(eventname):
    def mouse_binding(event):
        msg = 'Mouse event %s' % eventname
        L.config(text=msg)
    L.bind_all('<%s>' % eventname, mouse_binding)


for i in range(1, 4):
    do_mouse('Button-%s' % i)
    do_mouse('ButtonRelease-%s' % i)
    do_mouse('Double-Button-%s' % i)


def quit():
    window.destroy()
    sys.exit()


def getdistance():
    distance = float(distance_selected.get())
    return distance


def getspeed():
    speed = float(speed_selected.get())
    return speed


# NOTE: threading.Thread must receive the callable and its arguments
# separately (target=..., args=...). Passing target=f(x) would call f
# synchronously in the GUI thread and hand its return value to Thread.
def stop_moving():
    t = threading.Thread(target=os.system, args=(cmd,))
    t.start()


def moveforward_background():
    speed = getspeed()
    distance = getdistance()
    t = threading.Thread(target=ars_control.moveX, args=(speed, distance, True))
    t.start()


def moveback_background():
    speed = getspeed()
    distance = getdistance()
    t = threading.Thread(target=ars_control.moveX, args=(speed, distance, False))
    t.start()


def moveleft_background():
    speed = getspeed()
    distance = getdistance()
    t = threading.Thread(target=ars_control.moveY, args=(speed, distance, True))
    t.start()


def moveright_background():
    speed = getspeed()
    distance = getdistance()
    t = threading.Thread(target=ars_control.moveY, args=(speed, distance, False))
    t.start()


def return_home():
    t = threading.Thread(target=ars_control.control)
    t.start()


def goup_background():
    speed = getspeed()
    distance = getdistance()
    t = threading.Thread(target=ars_control.moveZ, args=(speed, distance, True))
    t.start()


def godown_background():
    speed = getspeed()
    distance = getdistance()
    t = threading.Thread(target=ars_control.moveZ, args=(speed, distance, False))
    t.start()


def rotationmoveleft_background():
    t = threading.Thread(target=ars_control.rotate, args=(10, 65, True))
    t.start()


def rotationmoveright_background():
    t = threading.Thread(target=ars_control.rotate, args=(10, 65, False))
    t.start()


# Define a callback function for exit
def quit_program(e):
    window.destroy()


if __name__ == '__main__':
    try:
        rospy.init_node('ars_remote_controller_test', anonymous=True)
        position_topic = "/robot_cmd_stamped"  # (change me /robot_cmd_stamped)
        pose_publisher = rospy.Publisher(position_topic, TwistStamped,
                                         queue_size=1)

        label_title = Label(window, text="Controller", font=("Courier", 40),
                            bg="#41B77F", fg="white")
        label_title.pack()
        window.title("Nao_Drone")
        window.geometry("1080x600")
        window.minsize(1000, 500)

        # user choice of speed and distance (each label now matches the
        # combobox placed next to it)
        speed_label = Label(window, text="Speed").place(x=30, y=50)
        distance_label = Label(window, text="Distance").place(x=740, y=50)

        distance_var = StringVar()
        distance_selected = ttk.Combobox(window, width=20, textvariable=distance_var)
        distance_selected['values'] = ('0.1', '0.2', '0.3', '0.5')
        distance_selected.place(x=800, y=50)
        distance_selected.current(0)

        speed_var = StringVar()
        speed_selected = ttk.Combobox(window, width=20, textvariable=speed_var)
        speed_selected['values'] = ('0.1', '0.2', '0.3', '0.5')
        speed_selected.place(x=80, y=50)
        speed_selected.current(0)

        moveforward_button = Button(window, text="Move Forward", height="3", width="20", command=moveforward_background).place(x=450, y=150)
        moveback_button = Button(window, text="Move Back", height="3", width="20", command=moveback_background).place(x=450, y=350)
        moveleft_button = Button(window, text="Move Left", height="3", width="20", command=moveleft_background).place(x=350, y=250)
        moveright_button = Button(window, text="Move Right", height="3", width="20", command=moveright_background).place(x=550, y=250)
        goup_button = Button(window, text="Go Up", height="3", width="20", command=goup_background).place(x=450, y=450)
        godown_button = Button(window, text="Go Down", height="3", width="20", command=godown_background).place(x=450, y=520)
        quit_button = Button(window, text="Quit Interface", height="3", width="20", command=quit).place(x=30, y=300)
        init_state_button = Button(window, text="Stop moving", height="3", width="20", command=stop_moving).place(x=30, y=420)
        rotationmoveleft_button = Button(window, text="Rotate to the left", height="3", width="20", command=rotationmoveleft_background).place(x=800, y=450)
        rotationmoveright_button = Button(window, text="Rotate to the right", height="3", width="20", command=rotationmoveright_background).place(x=800, y=520)

        # Add a Label widget
        label = Label(window, text="Press Ctrl + x to Exit", font=('Helvetica 15 bold'))
        label.pack(pady=10)

        # Bind the keyboard shortcut key
        window.bind('<Control-x>', quit_program)

        # A single mainloop() call is enough; it blocks until the window
        # is destroyed, so wrapping it in a loop only re-runs it on a
        # dead window.
        window.mainloop()
    except rospy.ROSInterruptException:
        rospy.loginfo("node terminated.")
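# --- Illustration (standalone) ---
# Why the button callbacks above pass target and args separately: with
# target=f(x), f runs synchronously in the calling (GUI) thread, and the
# Thread object is handed f's return value instead of a callable.
import threading
import time


def slow_echo(msg):
    time.sleep(0.1)
    print(msg)


t = threading.Thread(target=slow_echo, args=("runs in the background",))
t.start()
print("the main thread is not blocked")
t.join()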
test_migrate_stopped_vm_progress.py
''' New Integration test for testing stopped vm migration between hosts. @author: quarkonics ''' import zstackwoodpecker.test_util as test_util import zstackwoodpecker.test_lib as test_lib import zstackwoodpecker.operations.volume_operations as vol_ops import zstackwoodpecker.operations.resource_operations as res_ops import apibinding.inventory as inventory import threading import time vm = None test_stub = test_lib.lib_get_test_stub() def migrate_volume(volume_uuid, target_host_uuid): vol_ops.migrate_volume(volume_uuid, target_host_uuid) def test(): global vm vm = test_stub.create_vr_vm('migrate_stopped_vm', 'imageName_s', 'l3VlanNetwork2') ps = test_lib.lib_get_primary_storage_by_uuid(vm.get_vm().allVolumes[0].primaryStorageUuid) if ps.type != inventory.LOCAL_STORAGE_TYPE: test_util.test_skip('Skip test on non-localstorage') target_host = test_lib.lib_find_random_host(vm.vm) vm.stop() thread = threading.Thread(target=migrate_volume, args=(vm.get_vm().allVolumes[0].uuid, target_host.uuid, )) thread.start() time.sleep(5) progress = res_ops.get_task_progress(vm.get_vm().allVolumes[0].uuid) if int(progress.progress) < 0 or int(progress.progress) > 100: test_util.test_fail("Progress of task should be between 0 and 100, while it actually is %s" % (progress.progress)) thread.join() vm.destroy() test_util.test_pass('Migrate Stopped VM Test Success') #Will be called only if exception happens in test(). def error_cleanup(): global vm if vm: try: vm.destroy() except: pass
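# --- Illustration (standalone) ---
# The shape of the check test() performs above: start a long-running
# operation in a thread, poll a progress value from the main thread, and
# require it to stay within [0, 100] before joining. The names below are
# invented for this sketch.
import threading
import time

progress = {'value': 0}


def long_task():
    for step in range(1, 11):
        time.sleep(0.05)
        progress['value'] = step * 10


worker = threading.Thread(target=long_task)
worker.start()
time.sleep(0.1)
assert 0 <= progress['value'] <= 100
worker.join()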
utils.py
#!/usr/bin/env python import sys import array import numpy as np from skimage.color import rgb2gray from skimage.transform import resize from skimage.io import imread import matplotlib.pyplot as plt import matplotlib.image as mpimg from inputs import get_gamepad import math import threading def resize_image(img): im = resize(img, (Sample.IMG_H, Sample.IMG_W, Sample.IMG_D)) im_arr = im.reshape((Sample.IMG_H, Sample.IMG_W, Sample.IMG_D)) return im_arr class Screenshot(object): SRC_W = 640 SRC_H = 480 SRC_D = 3 OFFSET_X = 0 OFFSET_Y = 0 class Sample: IMG_W = 200 IMG_H = 66 IMG_D = 3 class XboxController(object): MAX_TRIG_VAL = math.pow(2, 8) MAX_JOY_VAL = math.pow(2, 15) def __init__(self): self.LeftJoystickY = 0 self.LeftJoystickX = 0 self.RightJoystickY = 0 self.RightJoystickX = 0 self.LeftTrigger = 0 self.RightTrigger = 0 self.LeftBumper = 0 self.RightBumper = 0 self.A = 0 self.X = 0 self.Y = 0 self.B = 0 self.LeftThumb = 0 self.RightThumb = 0 self.Back = 0 self.Start = 0 self.LeftDPad = 0 self.RightDPad = 0 self.UpDPad = 0 self.DownDPad = 0 self._monitor_thread = threading.Thread(target=self._monitor_controller, args=()) self._monitor_thread.daemon = True self._monitor_thread.start() def read(self): x = self.LeftJoystickX y = self.LeftJoystickY a = self.A b = self.X # b=1, x=2 rb = self.RightBumper return [x, y, a, b, rb] def _monitor_controller(self): while True: events = get_gamepad() for event in events: if event.code == 'ABS_Y': self.LeftJoystickY = event.state / XboxController.MAX_JOY_VAL # normalize between -1 and 1 elif event.code == 'ABS_X': self.LeftJoystickX = event.state / XboxController.MAX_JOY_VAL # normalize between -1 and 1 elif event.code == 'ABS_RY': self.RightJoystickY = event.state / XboxController.MAX_JOY_VAL # normalize between -1 and 1 elif event.code == 'ABS_RX': self.RightJoystickX = event.state / XboxController.MAX_JOY_VAL # normalize between -1 and 1 elif event.code == 'ABS_Z': self.LeftTrigger = event.state / XboxController.MAX_TRIG_VAL # normalize between 0 and 1 elif event.code == 'ABS_RZ': self.RightTrigger = event.state / XboxController.MAX_TRIG_VAL # normalize between 0 and 1 elif event.code == 'BTN_TL': self.LeftBumper = event.state elif event.code == 'BTN_TR': self.RightBumper = event.state elif event.code == 'BTN_SOUTH': self.A = event.state elif event.code == 'BTN_NORTH': self.X = event.state elif event.code == 'BTN_WEST': self.Y = event.state elif event.code == 'BTN_EAST': self.B = event.state elif event.code == 'BTN_THUMBL': self.LeftThumb = event.state elif event.code == 'BTN_THUMBR': self.RightThumb = event.state elif event.code == 'BTN_SELECT': self.Back = event.state elif event.code == 'BTN_START': self.Start = event.state elif event.code == 'BTN_TRIGGER_HAPPY1': self.LeftDPad = event.state elif event.code == 'BTN_TRIGGER_HAPPY2': self.RightDPad = event.state elif event.code == 'BTN_TRIGGER_HAPPY3': self.UpDPad = event.state elif event.code == 'BTN_TRIGGER_HAPPY4': self.DownDPad = event.state class Data(object): def __init__(self): self._X = np.load("data/X.npy") self._y = np.load("data/y.npy") self._epochs_completed = 0 self._index_in_epoch = 0 self._num_examples = self._X.shape[0] @property def num_examples(self): return self._num_examples def next_batch(self, batch_size): start = self._index_in_epoch self._index_in_epoch += batch_size if self._index_in_epoch > self._num_examples: # Finished epoch self._epochs_completed += 1 # Start next epoch start = 0 self._index_in_epoch = batch_size assert batch_size <= self._num_examples end = 
self._index_in_epoch return self._X[start:end], self._y[start:end] def load_sample(sample): image_files = np.loadtxt(sample + '/data.csv', delimiter=',', dtype=str, usecols=(0,)) joystick_values = np.loadtxt(sample + '/data.csv', delimiter=',', usecols=(1,2,3,4,5)) return image_files, joystick_values # training data viewer def viewer(sample): image_files, joystick_values = load_sample(sample) plotData = [] plt.ion() plt.figure('viewer', figsize=(16, 6)) for i in range(len(image_files)): # joystick print(i, " ", joystick_values[i,:]) # format data plotData.append( joystick_values[i,:] ) if len(plotData) > 30: plotData.pop(0) x = np.asarray(plotData) # image (every 3rd) if (i % 3 == 0): plt.subplot(121) image_file = image_files[i] img = mpimg.imread(image_file) plt.imshow(img) # plot plt.subplot(122) plt.plot(range(i,i+len(plotData)), x[:,0], 'r') plt.hold(True) plt.plot(range(i,i+len(plotData)), x[:,1], 'b') plt.plot(range(i,i+len(plotData)), x[:,2], 'g') plt.plot(range(i,i+len(plotData)), x[:,3], 'k') plt.plot(range(i,i+len(plotData)), x[:,4], 'y') plt.draw() plt.hold(False) plt.pause(0.0001) # seconds i += 1 # prepare training data def prepare(samples): print("Preparing data") X = [] y = [] for sample in samples: print(sample) # load sample image_files, joystick_values = load_sample(sample) # add joystick values to y y.append(joystick_values) # load, prepare and add images to X for image_file in image_files: image = imread(image_file) vec = resize_image(image) X.append(vec) print("Saving to file...") X = np.asarray(X) y = np.concatenate(y) np.save("data/X", X) np.save("data/y", y) print("Done!") return if __name__ == '__main__': if sys.argv[1] == 'viewer': viewer(sys.argv[2]) elif sys.argv[1] == 'prepare': prepare(sys.argv[2:])
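# --- Illustration (standalone) ---
# The epoch bookkeeping in Data.next_batch() above: when a request would run
# past the end of the data, the epoch counter advances and indexing restarts
# from zero (the partial batch at the tail is dropped, as in the original).
import numpy as np

X = np.arange(10)
state = {'index': 0, 'epochs': 0}
BATCH = 4


def next_batch():
    start = state['index']
    state['index'] += BATCH
    if state['index'] > X.shape[0]:  # wrapped past the end: new epoch
        state['epochs'] += 1
        start = 0
        state['index'] = BATCH
    return X[start:state['index']]


print(next_batch())  # [0 1 2 3]
print(next_batch())  # [4 5 6 7]
print(next_batch())  # [0 1 2 3]; state['epochs'] == 1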
test_logging.py
# Copyright 2001-2019 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, # provided that the above copyright notice appear in all copies and that # both that copyright notice and this permission notice appear in # supporting documentation, and that the name of Vinay Sajip # not be used in advertising or publicity pertaining to distribution # of the software without specific, written prior permission. # VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING # ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL # VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR # ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER # IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """Test harness for the logging module. Run all tests. Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved. """ import logging import logging.handlers import logging.config import codecs import configparser import copy import datetime import pathlib import pickle import io import gc import json import os import queue import random import re import signal import socket import struct import sys import tempfile from test.support.script_helper import assert_python_ok, assert_python_failure from test import support import textwrap import threading import time import unittest import warnings import weakref import asyncore from http.server import HTTPServer, BaseHTTPRequestHandler import smtpd from urllib.parse import urlparse, parse_qs from socketserver import (ThreadingUDPServer, DatagramRequestHandler, ThreadingTCPServer, StreamRequestHandler) try: import win32evtlog, win32evtlogutil, pywintypes except ImportError: win32evtlog = win32evtlogutil = pywintypes = None try: import zlib except ImportError: pass class BaseTest(unittest.TestCase): """Base class for logging tests.""" log_format = "%(name)s -> %(levelname)s: %(message)s" expected_log_pat = r"^([\w.]+) -> (\w+): (\d+)$" message_num = 0 def setUp(self): """Setup the default logging stream to an internal StringIO instance, so that we can examine log output as we want.""" self._threading_key = support.threading_setup() logger_dict = logging.getLogger().manager.loggerDict logging._acquireLock() try: self.saved_handlers = logging._handlers.copy() self.saved_handler_list = logging._handlerList[:] self.saved_loggers = saved_loggers = logger_dict.copy() self.saved_name_to_level = logging._nameToLevel.copy() self.saved_level_to_name = logging._levelToName.copy() self.logger_states = logger_states = {} for name in saved_loggers: logger_states[name] = getattr(saved_loggers[name], 'disabled', None) finally: logging._releaseLock() # Set two unused loggers self.logger1 = logging.getLogger("\xab\xd7\xbb") self.logger2 = logging.getLogger("\u013f\u00d6\u0047") self.root_logger = logging.getLogger("") self.original_logging_level = self.root_logger.getEffectiveLevel() self.stream = io.StringIO() self.root_logger.setLevel(logging.DEBUG) self.root_hdlr = logging.StreamHandler(self.stream) self.root_formatter = logging.Formatter(self.log_format) self.root_hdlr.setFormatter(self.root_formatter) if self.logger1.hasHandlers(): hlist = self.logger1.handlers + self.root_logger.handlers raise AssertionError('Unexpected handlers: %s' % hlist) if self.logger2.hasHandlers(): hlist = 
self.logger2.handlers + self.root_logger.handlers raise AssertionError('Unexpected handlers: %s' % hlist) self.root_logger.addHandler(self.root_hdlr) self.assertTrue(self.logger1.hasHandlers()) self.assertTrue(self.logger2.hasHandlers()) def tearDown(self): """Remove our logging stream, and restore the original logging level.""" self.stream.close() self.root_logger.removeHandler(self.root_hdlr) while self.root_logger.handlers: h = self.root_logger.handlers[0] self.root_logger.removeHandler(h) h.close() self.root_logger.setLevel(self.original_logging_level) logging._acquireLock() try: logging._levelToName.clear() logging._levelToName.update(self.saved_level_to_name) logging._nameToLevel.clear() logging._nameToLevel.update(self.saved_name_to_level) logging._handlers.clear() logging._handlers.update(self.saved_handlers) logging._handlerList[:] = self.saved_handler_list manager = logging.getLogger().manager manager.disable = 0 loggerDict = manager.loggerDict loggerDict.clear() loggerDict.update(self.saved_loggers) logger_states = self.logger_states for name in self.logger_states: if logger_states[name] is not None: self.saved_loggers[name].disabled = logger_states[name] finally: logging._releaseLock() self.doCleanups() support.threading_cleanup(*self._threading_key) def assert_log_lines(self, expected_values, stream=None, pat=None): """Match the collected log lines against the regular expression self.expected_log_pat, and compare the extracted group values to the expected_values list of tuples.""" stream = stream or self.stream pat = re.compile(pat or self.expected_log_pat) actual_lines = stream.getvalue().splitlines() self.assertEqual(len(actual_lines), len(expected_values)) for actual, expected in zip(actual_lines, expected_values): match = pat.search(actual) if not match: self.fail("Log line does not match expected pattern:\n" + actual) self.assertEqual(tuple(match.groups()), expected) s = stream.read() if s: self.fail("Remaining output at end of log stream:\n" + s) def next_message(self): """Generate a message consisting solely of an auto-incrementing integer.""" self.message_num += 1 return "%d" % self.message_num class BuiltinLevelsTest(BaseTest): """Test builtin levels and their inheritance.""" def test_flat(self): # Logging levels in a flat logger namespace. m = self.next_message ERR = logging.getLogger("ERR") ERR.setLevel(logging.ERROR) INF = logging.LoggerAdapter(logging.getLogger("INF"), {}) INF.setLevel(logging.INFO) DEB = logging.getLogger("DEB") DEB.setLevel(logging.DEBUG) # These should log. ERR.log(logging.CRITICAL, m()) ERR.error(m()) INF.log(logging.CRITICAL, m()) INF.error(m()) INF.warning(m()) INF.info(m()) DEB.log(logging.CRITICAL, m()) DEB.error(m()) DEB.warning(m()) DEB.info(m()) DEB.debug(m()) # These should not log. ERR.warning(m()) ERR.info(m()) ERR.debug(m()) INF.debug(m()) self.assert_log_lines([ ('ERR', 'CRITICAL', '1'), ('ERR', 'ERROR', '2'), ('INF', 'CRITICAL', '3'), ('INF', 'ERROR', '4'), ('INF', 'WARNING', '5'), ('INF', 'INFO', '6'), ('DEB', 'CRITICAL', '7'), ('DEB', 'ERROR', '8'), ('DEB', 'WARNING', '9'), ('DEB', 'INFO', '10'), ('DEB', 'DEBUG', '11'), ]) def test_nested_explicit(self): # Logging levels in a nested namespace, all explicitly set. m = self.next_message INF = logging.getLogger("INF") INF.setLevel(logging.INFO) INF_ERR = logging.getLogger("INF.ERR") INF_ERR.setLevel(logging.ERROR) # These should log. INF_ERR.log(logging.CRITICAL, m()) INF_ERR.error(m()) # These should not log. 
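        # (INF.ERR has level ERROR, so WARNING, INFO and DEBUG records fall
        # below its threshold and are rejected.)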
INF_ERR.warning(m()) INF_ERR.info(m()) INF_ERR.debug(m()) self.assert_log_lines([ ('INF.ERR', 'CRITICAL', '1'), ('INF.ERR', 'ERROR', '2'), ]) def test_nested_inherited(self): # Logging levels in a nested namespace, inherited from parent loggers. m = self.next_message INF = logging.getLogger("INF") INF.setLevel(logging.INFO) INF_ERR = logging.getLogger("INF.ERR") INF_ERR.setLevel(logging.ERROR) INF_UNDEF = logging.getLogger("INF.UNDEF") INF_ERR_UNDEF = logging.getLogger("INF.ERR.UNDEF") UNDEF = logging.getLogger("UNDEF") # These should log. INF_UNDEF.log(logging.CRITICAL, m()) INF_UNDEF.error(m()) INF_UNDEF.warning(m()) INF_UNDEF.info(m()) INF_ERR_UNDEF.log(logging.CRITICAL, m()) INF_ERR_UNDEF.error(m()) # These should not log. INF_UNDEF.debug(m()) INF_ERR_UNDEF.warning(m()) INF_ERR_UNDEF.info(m()) INF_ERR_UNDEF.debug(m()) self.assert_log_lines([ ('INF.UNDEF', 'CRITICAL', '1'), ('INF.UNDEF', 'ERROR', '2'), ('INF.UNDEF', 'WARNING', '3'), ('INF.UNDEF', 'INFO', '4'), ('INF.ERR.UNDEF', 'CRITICAL', '5'), ('INF.ERR.UNDEF', 'ERROR', '6'), ]) def test_nested_with_virtual_parent(self): # Logging levels when some parent does not exist yet. m = self.next_message INF = logging.getLogger("INF") GRANDCHILD = logging.getLogger("INF.BADPARENT.UNDEF") CHILD = logging.getLogger("INF.BADPARENT") INF.setLevel(logging.INFO) # These should log. GRANDCHILD.log(logging.FATAL, m()) GRANDCHILD.info(m()) CHILD.log(logging.FATAL, m()) CHILD.info(m()) # These should not log. GRANDCHILD.debug(m()) CHILD.debug(m()) self.assert_log_lines([ ('INF.BADPARENT.UNDEF', 'CRITICAL', '1'), ('INF.BADPARENT.UNDEF', 'INFO', '2'), ('INF.BADPARENT', 'CRITICAL', '3'), ('INF.BADPARENT', 'INFO', '4'), ]) def test_regression_22386(self): """See issue #22386 for more information.""" self.assertEqual(logging.getLevelName('INFO'), logging.INFO) self.assertEqual(logging.getLevelName(logging.INFO), 'INFO') def test_issue27935(self): fatal = logging.getLevelName('FATAL') self.assertEqual(fatal, logging.FATAL) def test_regression_29220(self): """See issue #29220 for more information.""" logging.addLevelName(logging.INFO, '') self.addCleanup(logging.addLevelName, logging.INFO, 'INFO') self.assertEqual(logging.getLevelName(logging.INFO), '') self.assertEqual(logging.getLevelName(logging.NOTSET), 'NOTSET') self.assertEqual(logging.getLevelName('NOTSET'), logging.NOTSET) class BasicFilterTest(BaseTest): """Test the bundled Filter class.""" def test_filter(self): # Only messages satisfying the specified criteria pass through the # filter. filter_ = logging.Filter("spam.eggs") handler = self.root_logger.handlers[0] try: handler.addFilter(filter_) spam = logging.getLogger("spam") spam_eggs = logging.getLogger("spam.eggs") spam_eggs_fish = logging.getLogger("spam.eggs.fish") spam_bakedbeans = logging.getLogger("spam.bakedbeans") spam.info(self.next_message()) spam_eggs.info(self.next_message()) # Good. spam_eggs_fish.info(self.next_message()) # Good. spam_bakedbeans.info(self.next_message()) self.assert_log_lines([ ('spam.eggs', 'INFO', '2'), ('spam.eggs.fish', 'INFO', '3'), ]) finally: handler.removeFilter(filter_) def test_callable_filter(self): # Only messages satisfying the specified criteria pass through the # filter. 
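        # Since Python 3.2, addFilter() accepts any callable taking a record
        # and returning a truthy value, not just logging.Filter instances.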
def filterfunc(record): parts = record.name.split('.') prefix = '.'.join(parts[:2]) return prefix == 'spam.eggs' handler = self.root_logger.handlers[0] try: handler.addFilter(filterfunc) spam = logging.getLogger("spam") spam_eggs = logging.getLogger("spam.eggs") spam_eggs_fish = logging.getLogger("spam.eggs.fish") spam_bakedbeans = logging.getLogger("spam.bakedbeans") spam.info(self.next_message()) spam_eggs.info(self.next_message()) # Good. spam_eggs_fish.info(self.next_message()) # Good. spam_bakedbeans.info(self.next_message()) self.assert_log_lines([ ('spam.eggs', 'INFO', '2'), ('spam.eggs.fish', 'INFO', '3'), ]) finally: handler.removeFilter(filterfunc) def test_empty_filter(self): f = logging.Filter() r = logging.makeLogRecord({'name': 'spam.eggs'}) self.assertTrue(f.filter(r)) # # First, we define our levels. There can be as many as you want - the only # limitations are that they should be integers, the lowest should be > 0 and # larger values mean less information being logged. If you need specific # level values which do not fit into these limitations, you can use a # mapping dictionary to convert between your application levels and the # logging system. # SILENT = 120 TACITURN = 119 TERSE = 118 EFFUSIVE = 117 SOCIABLE = 116 VERBOSE = 115 TALKATIVE = 114 GARRULOUS = 113 CHATTERBOX = 112 BORING = 111 LEVEL_RANGE = range(BORING, SILENT + 1) # # Next, we define names for our levels. You don't need to do this - in which # case the system will use "Level n" to denote the text for the level. # my_logging_levels = { SILENT : 'Silent', TACITURN : 'Taciturn', TERSE : 'Terse', EFFUSIVE : 'Effusive', SOCIABLE : 'Sociable', VERBOSE : 'Verbose', TALKATIVE : 'Talkative', GARRULOUS : 'Garrulous', CHATTERBOX : 'Chatterbox', BORING : 'Boring', } class GarrulousFilter(logging.Filter): """A filter which blocks garrulous messages.""" def filter(self, record): return record.levelno != GARRULOUS class VerySpecificFilter(logging.Filter): """A filter which blocks sociable and taciturn messages.""" def filter(self, record): return record.levelno not in [SOCIABLE, TACITURN] class CustomLevelsAndFiltersTest(BaseTest): """Test various filtering possibilities with custom logging levels.""" # Skip the logger name group. expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) for k, v in my_logging_levels.items(): logging.addLevelName(k, v) def log_at_all_levels(self, logger): for lvl in LEVEL_RANGE: logger.log(lvl, self.next_message()) def test_logger_filter(self): # Filter at logger level. self.root_logger.setLevel(VERBOSE) # Levels >= 'Verbose' are good. self.log_at_all_levels(self.root_logger) self.assert_log_lines([ ('Verbose', '5'), ('Sociable', '6'), ('Effusive', '7'), ('Terse', '8'), ('Taciturn', '9'), ('Silent', '10'), ]) def test_handler_filter(self): # Filter at handler level. self.root_logger.handlers[0].setLevel(SOCIABLE) try: # Levels >= 'Sociable' are good. self.log_at_all_levels(self.root_logger) self.assert_log_lines([ ('Sociable', '6'), ('Effusive', '7'), ('Terse', '8'), ('Taciturn', '9'), ('Silent', '10'), ]) finally: self.root_logger.handlers[0].setLevel(logging.NOTSET) def test_specific_filters(self): # Set a specific filter object on the handler, and then add another # filter object on the logger itself. 
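        # Handler-level and logger-level filters are checked independently:
        # a record must pass both chains before it is emitted.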
handler = self.root_logger.handlers[0] specific_filter = None garr = GarrulousFilter() handler.addFilter(garr) try: self.log_at_all_levels(self.root_logger) first_lines = [ # Notice how 'Garrulous' is missing ('Boring', '1'), ('Chatterbox', '2'), ('Talkative', '4'), ('Verbose', '5'), ('Sociable', '6'), ('Effusive', '7'), ('Terse', '8'), ('Taciturn', '9'), ('Silent', '10'), ] self.assert_log_lines(first_lines) specific_filter = VerySpecificFilter() self.root_logger.addFilter(specific_filter) self.log_at_all_levels(self.root_logger) self.assert_log_lines(first_lines + [ # Not only 'Garrulous' is still missing, but also 'Sociable' # and 'Taciturn' ('Boring', '11'), ('Chatterbox', '12'), ('Talkative', '14'), ('Verbose', '15'), ('Effusive', '17'), ('Terse', '18'), ('Silent', '20'), ]) finally: if specific_filter: self.root_logger.removeFilter(specific_filter) handler.removeFilter(garr) class HandlerTest(BaseTest): def test_name(self): h = logging.Handler() h.name = 'generic' self.assertEqual(h.name, 'generic') h.name = 'anothergeneric' self.assertEqual(h.name, 'anothergeneric') self.assertRaises(NotImplementedError, h.emit, None) def test_builtin_handlers(self): # We can't actually *use* too many handlers in the tests, # but we can try instantiating them with various options if sys.platform in ('linux', 'darwin'): for existing in (True, False): fd, fn = tempfile.mkstemp() os.close(fd) if not existing: os.unlink(fn) h = logging.handlers.WatchedFileHandler(fn, delay=True) if existing: dev, ino = h.dev, h.ino self.assertEqual(dev, -1) self.assertEqual(ino, -1) r = logging.makeLogRecord({'msg': 'Test'}) h.handle(r) # Now remove the file. os.unlink(fn) self.assertFalse(os.path.exists(fn)) # The next call should recreate the file. h.handle(r) self.assertTrue(os.path.exists(fn)) else: self.assertEqual(h.dev, -1) self.assertEqual(h.ino, -1) h.close() if existing: os.unlink(fn) if sys.platform == 'darwin': sockname = '/var/run/syslog' else: sockname = '/dev/log' try: h = logging.handlers.SysLogHandler(sockname) self.assertEqual(h.facility, h.LOG_USER) self.assertTrue(h.unixsocket) h.close() except OSError: # syslogd might not be available pass for method in ('GET', 'POST', 'PUT'): if method == 'PUT': self.assertRaises(ValueError, logging.handlers.HTTPHandler, 'localhost', '/log', method) else: h = logging.handlers.HTTPHandler('localhost', '/log', method) h.close() h = logging.handlers.BufferingHandler(0) r = logging.makeLogRecord({}) self.assertTrue(h.shouldFlush(r)) h.close() h = logging.handlers.BufferingHandler(1) self.assertFalse(h.shouldFlush(r)) h.close() def test_path_objects(self): """ Test that Path objects are accepted as filename arguments to handlers. See Issue #27493. """ fd, fn = tempfile.mkstemp() os.close(fd) os.unlink(fn) pfn = pathlib.Path(fn) cases = ( (logging.FileHandler, (pfn, 'w')), (logging.handlers.RotatingFileHandler, (pfn, 'a')), (logging.handlers.TimedRotatingFileHandler, (pfn, 'h')), ) if sys.platform in ('linux', 'darwin'): cases += ((logging.handlers.WatchedFileHandler, (pfn, 'w')),) for cls, args in cases: h = cls(*args) self.assertTrue(os.path.exists(fn)) h.close() os.unlink(fn) @unittest.skipIf(os.name == 'nt', 'WatchedFileHandler not appropriate for Windows.') def test_race(self): # Issue #14632 refers. 
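        # A helper thread repeatedly deletes the log file while records are
        # being handled; WatchedFileHandler must notice the deletion and
        # reopen the file instead of crashing.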
def remove_loop(fname, tries): for _ in range(tries): try: os.unlink(fname) self.deletion_time = time.time() except OSError: pass time.sleep(0.004 * random.randint(0, 4)) del_count = 500 log_count = 500 self.handle_time = None self.deletion_time = None for delay in (False, True): fd, fn = tempfile.mkstemp('.log', 'test_logging-3-') os.close(fd) remover = threading.Thread(target=remove_loop, args=(fn, del_count)) remover.daemon = True remover.start() h = logging.handlers.WatchedFileHandler(fn, delay=delay) f = logging.Formatter('%(asctime)s: %(levelname)s: %(message)s') h.setFormatter(f) try: for _ in range(log_count): time.sleep(0.005) r = logging.makeLogRecord({'msg': 'testing' }) try: self.handle_time = time.time() h.handle(r) except Exception: print('Deleted at %s, ' 'opened at %s' % (self.deletion_time, self.handle_time)) raise finally: remover.join() h.close() if os.path.exists(fn): os.unlink(fn) # The implementation relies on os.register_at_fork existing, but we test # based on os.fork existing because that is what users and this test use. # This helps ensure that when fork exists (the important concept) that the # register_at_fork mechanism is also present and used. @unittest.skipIf(not hasattr(os, 'fork'), 'Test requires os.fork().') def test_post_fork_child_no_deadlock(self): """Ensure child logging locks are not held; bpo-6721 & bpo-36533.""" class _OurHandler(logging.Handler): def __init__(self): super().__init__() self.sub_handler = logging.StreamHandler( stream=open('/dev/null', 'wt')) def emit(self, record): self.sub_handler.acquire() try: self.sub_handler.emit(record) finally: self.sub_handler.release() self.assertEqual(len(logging._handlers), 0) refed_h = _OurHandler() self.addCleanup(refed_h.sub_handler.stream.close) refed_h.name = 'because we need at least one for this test' self.assertGreater(len(logging._handlers), 0) self.assertGreater(len(logging._at_fork_reinit_lock_weakset), 1) test_logger = logging.getLogger('test_post_fork_child_no_deadlock') test_logger.addHandler(refed_h) test_logger.setLevel(logging.DEBUG) locks_held__ready_to_fork = threading.Event() fork_happened__release_locks_and_end_thread = threading.Event() def lock_holder_thread_fn(): logging._acquireLock() try: refed_h.acquire() try: # Tell the main thread to do the fork. locks_held__ready_to_fork.set() # If the deadlock bug exists, the fork will happen # without dealing with the locks we hold, deadlocking # the child. # Wait for a successful fork or an unreasonable amount of # time before releasing our locks. To avoid a timing based # test we'd need communication from os.fork() as to when it # has actually happened. Given this is a regression test # for a fixed issue, potentially less reliably detecting # regression via timing is acceptable for simplicity. # The test will always take at least this long. :( fork_happened__release_locks_and_end_thread.wait(0.5) finally: refed_h.release() finally: logging._releaseLock() lock_holder_thread = threading.Thread( target=lock_holder_thread_fn, name='test_post_fork_child_no_deadlock lock holder') lock_holder_thread.start() locks_held__ready_to_fork.wait() pid = os.fork() if pid == 0: # Child. try: test_logger.info(r'Child process did not deadlock. \o/') finally: os._exit(0) else: # Parent. test_logger.info(r'Parent process returned from fork. 
\o/') fork_happened__release_locks_and_end_thread.set() lock_holder_thread.join() start_time = time.monotonic() while True: test_logger.debug('Waiting for child process.') waited_pid, status = os.waitpid(pid, os.WNOHANG) if waited_pid == pid: break # child process exited. if time.monotonic() - start_time > 7: break # so long? implies child deadlock. time.sleep(0.05) test_logger.debug('Done waiting.') if waited_pid != pid: os.kill(pid, signal.SIGKILL) waited_pid, status = os.waitpid(pid, 0) self.fail("child process deadlocked.") self.assertEqual(status, 0, msg="child process error") class BadStream(object): def write(self, data): raise RuntimeError('deliberate mistake') class TestStreamHandler(logging.StreamHandler): def handleError(self, record): self.error_record = record class StreamWithIntName(object): level = logging.NOTSET name = 2 class StreamHandlerTest(BaseTest): def test_error_handling(self): h = TestStreamHandler(BadStream()) r = logging.makeLogRecord({}) old_raise = logging.raiseExceptions try: h.handle(r) self.assertIs(h.error_record, r) h = logging.StreamHandler(BadStream()) with support.captured_stderr() as stderr: h.handle(r) msg = '\nRuntimeError: deliberate mistake\n' self.assertIn(msg, stderr.getvalue()) logging.raiseExceptions = False with support.captured_stderr() as stderr: h.handle(r) self.assertEqual('', stderr.getvalue()) finally: logging.raiseExceptions = old_raise def test_stream_setting(self): """ Test setting the handler's stream """ h = logging.StreamHandler() stream = io.StringIO() old = h.setStream(stream) self.assertIs(old, sys.stderr) actual = h.setStream(old) self.assertIs(actual, stream) # test that setting to existing value returns None actual = h.setStream(old) self.assertIsNone(actual) def test_can_represent_stream_with_int_name(self): h = logging.StreamHandler(StreamWithIntName()) self.assertEqual(repr(h), '<StreamHandler 2 (NOTSET)>') # -- The following section could be moved into a server_helper.py module # -- if it proves to be of wider utility than just test_logging class TestSMTPServer(smtpd.SMTPServer): """ This class implements a test SMTP server. :param addr: A (host, port) tuple which the server listens on. You can specify a port value of zero: the server's *port* attribute will hold the actual port number used, which can be used in client connections. :param handler: A callable which will be called to process incoming messages. The handler will be passed the client address tuple, who the message is from, a list of recipients and the message data. :param poll_interval: The interval, in seconds, used in the underlying :func:`select` or :func:`poll` call by :func:`asyncore.loop`. :param sockmap: A dictionary which will be used to hold :class:`asyncore.dispatcher` instances used by :func:`asyncore.loop`. This avoids changing the :mod:`asyncore` module's global state. """ def __init__(self, addr, handler, poll_interval, sockmap): smtpd.SMTPServer.__init__(self, addr, None, map=sockmap, decode_data=True) self.port = self.socket.getsockname()[1] self._handler = handler self._thread = None self.poll_interval = poll_interval def process_message(self, peer, mailfrom, rcpttos, data): """ Delegates to the handler passed in to the server's constructor. Typically, this will be a test case method. :param peer: The client (host, port) tuple. :param mailfrom: The address of the sender. :param rcpttos: The addresses of the recipients. :param data: The message. 
""" self._handler(peer, mailfrom, rcpttos, data) def start(self): """ Start the server running on a separate daemon thread. """ self._thread = t = threading.Thread(target=self.serve_forever, args=(self.poll_interval,)) t.setDaemon(True) t.start() def serve_forever(self, poll_interval): """ Run the :mod:`asyncore` loop until normal termination conditions arise. :param poll_interval: The interval, in seconds, used in the underlying :func:`select` or :func:`poll` call by :func:`asyncore.loop`. """ asyncore.loop(poll_interval, map=self._map) def stop(self): """ Stop the thread by closing the server instance. Wait for the server thread to terminate. """ self.close() support.join_thread(self._thread) self._thread = None asyncore.close_all(map=self._map, ignore_all=True) class ControlMixin(object): """ This mixin is used to start a server on a separate thread, and shut it down programmatically. Request handling is simplified - instead of needing to derive a suitable RequestHandler subclass, you just provide a callable which will be passed each received request to be processed. :param handler: A handler callable which will be called with a single parameter - the request - in order to process the request. This handler is called on the server thread, effectively meaning that requests are processed serially. While not quite Web scale ;-), this should be fine for testing applications. :param poll_interval: The polling interval in seconds. """ def __init__(self, handler, poll_interval): self._thread = None self.poll_interval = poll_interval self._handler = handler self.ready = threading.Event() def start(self): """ Create a daemon thread to run the server, and start it. """ self._thread = t = threading.Thread(target=self.serve_forever, args=(self.poll_interval,)) t.setDaemon(True) t.start() def serve_forever(self, poll_interval): """ Run the server. Set the ready flag before entering the service loop. """ self.ready.set() super(ControlMixin, self).serve_forever(poll_interval) def stop(self): """ Tell the server thread to stop, and wait for it to do so. """ self.shutdown() if self._thread is not None: support.join_thread(self._thread) self._thread = None self.server_close() self.ready.clear() class TestHTTPServer(ControlMixin, HTTPServer): """ An HTTP server which is controllable using :class:`ControlMixin`. :param addr: A tuple with the IP address and port to listen on. :param handler: A handler callable which will be called with a single parameter - the request - in order to process the request. :param poll_interval: The polling interval in seconds. :param log: Pass ``True`` to enable log messages. 
""" def __init__(self, addr, handler, poll_interval=0.5, log=False, sslctx=None): class DelegatingHTTPRequestHandler(BaseHTTPRequestHandler): def __getattr__(self, name, default=None): if name.startswith('do_'): return self.process_request raise AttributeError(name) def process_request(self): self.server._handler(self) def log_message(self, format, *args): if log: super(DelegatingHTTPRequestHandler, self).log_message(format, *args) HTTPServer.__init__(self, addr, DelegatingHTTPRequestHandler) ControlMixin.__init__(self, handler, poll_interval) self.sslctx = sslctx def get_request(self): try: sock, addr = self.socket.accept() if self.sslctx: sock = self.sslctx.wrap_socket(sock, server_side=True) except OSError as e: # socket errors are silenced by the caller, print them here sys.stderr.write("Got an error:\n%s\n" % e) raise return sock, addr class TestTCPServer(ControlMixin, ThreadingTCPServer): """ A TCP server which is controllable using :class:`ControlMixin`. :param addr: A tuple with the IP address and port to listen on. :param handler: A handler callable which will be called with a single parameter - the request - in order to process the request. :param poll_interval: The polling interval in seconds. :bind_and_activate: If True (the default), binds the server and starts it listening. If False, you need to call :meth:`server_bind` and :meth:`server_activate` at some later time before calling :meth:`start`, so that the server will set up the socket and listen on it. """ allow_reuse_address = True def __init__(self, addr, handler, poll_interval=0.5, bind_and_activate=True): class DelegatingTCPRequestHandler(StreamRequestHandler): def handle(self): self.server._handler(self) ThreadingTCPServer.__init__(self, addr, DelegatingTCPRequestHandler, bind_and_activate) ControlMixin.__init__(self, handler, poll_interval) def server_bind(self): super(TestTCPServer, self).server_bind() self.port = self.socket.getsockname()[1] class TestUDPServer(ControlMixin, ThreadingUDPServer): """ A UDP server which is controllable using :class:`ControlMixin`. :param addr: A tuple with the IP address and port to listen on. :param handler: A handler callable which will be called with a single parameter - the request - in order to process the request. :param poll_interval: The polling interval for shutdown requests, in seconds. :bind_and_activate: If True (the default), binds the server and starts it listening. If False, you need to call :meth:`server_bind` and :meth:`server_activate` at some later time before calling :meth:`start`, so that the server will set up the socket and listen on it. 
""" def __init__(self, addr, handler, poll_interval=0.5, bind_and_activate=True): class DelegatingUDPRequestHandler(DatagramRequestHandler): def handle(self): self.server._handler(self) def finish(self): data = self.wfile.getvalue() if data: try: super(DelegatingUDPRequestHandler, self).finish() except OSError: if not self.server._closed: raise ThreadingUDPServer.__init__(self, addr, DelegatingUDPRequestHandler, bind_and_activate) ControlMixin.__init__(self, handler, poll_interval) self._closed = False def server_bind(self): super(TestUDPServer, self).server_bind() self.port = self.socket.getsockname()[1] def server_close(self): super(TestUDPServer, self).server_close() self._closed = True if hasattr(socket, "AF_UNIX"): class TestUnixStreamServer(TestTCPServer): address_family = socket.AF_UNIX class TestUnixDatagramServer(TestUDPServer): address_family = socket.AF_UNIX # - end of server_helper section class SMTPHandlerTest(BaseTest): # bpo-14314, bpo-19665, bpo-34092: don't wait forever TIMEOUT = support.LONG_TIMEOUT def test_basic(self): sockmap = {} server = TestSMTPServer((support.HOST, 0), self.process_message, 0.001, sockmap) server.start() addr = (support.HOST, server.port) h = logging.handlers.SMTPHandler(addr, 'me', 'you', 'Log', timeout=self.TIMEOUT) self.assertEqual(h.toaddrs, ['you']) self.messages = [] r = logging.makeLogRecord({'msg': 'Hello \u2713'}) self.handled = threading.Event() h.handle(r) self.handled.wait(self.TIMEOUT) server.stop() self.assertTrue(self.handled.is_set()) self.assertEqual(len(self.messages), 1) peer, mailfrom, rcpttos, data = self.messages[0] self.assertEqual(mailfrom, 'me') self.assertEqual(rcpttos, ['you']) self.assertIn('\nSubject: Log\n', data) self.assertTrue(data.endswith('\n\nHello \u2713')) h.close() def process_message(self, *args): self.messages.append(args) self.handled.set() class MemoryHandlerTest(BaseTest): """Tests for the MemoryHandler.""" # Do not bother with a logger name group. expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) self.mem_hdlr = logging.handlers.MemoryHandler(10, logging.WARNING, self.root_hdlr) self.mem_logger = logging.getLogger('mem') self.mem_logger.propagate = 0 self.mem_logger.addHandler(self.mem_hdlr) def tearDown(self): self.mem_hdlr.close() BaseTest.tearDown(self) def test_flush(self): # The memory handler flushes to its target handler based on specific # criteria (message count and message level). self.mem_logger.debug(self.next_message()) self.assert_log_lines([]) self.mem_logger.info(self.next_message()) self.assert_log_lines([]) # This will flush because the level is >= logging.WARNING self.mem_logger.warning(self.next_message()) lines = [ ('DEBUG', '1'), ('INFO', '2'), ('WARNING', '3'), ] self.assert_log_lines(lines) for n in (4, 14): for i in range(9): self.mem_logger.debug(self.next_message()) self.assert_log_lines(lines) # This will flush because it's the 10th message since the last # flush. self.mem_logger.debug(self.next_message()) lines = lines + [('DEBUG', str(i)) for i in range(n, n + 10)] self.assert_log_lines(lines) self.mem_logger.debug(self.next_message()) self.assert_log_lines(lines) def test_flush_on_close(self): """ Test that the flush-on-close configuration works as expected. """ self.mem_logger.debug(self.next_message()) self.assert_log_lines([]) self.mem_logger.info(self.next_message()) self.assert_log_lines([]) self.mem_logger.removeHandler(self.mem_hdlr) # Default behaviour is to flush on close. Check that it happens. 
self.mem_hdlr.close() lines = [ ('DEBUG', '1'), ('INFO', '2'), ] self.assert_log_lines(lines) # Now configure for flushing not to be done on close. self.mem_hdlr = logging.handlers.MemoryHandler(10, logging.WARNING, self.root_hdlr, False) self.mem_logger.addHandler(self.mem_hdlr) self.mem_logger.debug(self.next_message()) self.assert_log_lines(lines) # no change self.mem_logger.info(self.next_message()) self.assert_log_lines(lines) # no change self.mem_logger.removeHandler(self.mem_hdlr) self.mem_hdlr.close() # assert that no new lines have been added self.assert_log_lines(lines) # no change class ExceptionFormatter(logging.Formatter): """A special exception formatter.""" def formatException(self, ei): return "Got a [%s]" % ei[0].__name__ class ConfigFileTest(BaseTest): """Reading logging config from a .ini-style config file.""" check_no_resource_warning = support.check_no_resource_warning expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. config0 = """ [loggers] keys=root [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=WARNING handlers=hand1 [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= """ # config1 adds a little to the standard configuration. config1 = """ [loggers] keys=root,parser [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=WARNING handlers= [logger_parser] level=DEBUG handlers=hand1 propagate=1 qualname=compiler.parser [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= """ # config1a moves the handler to the root. config1a = """ [loggers] keys=root,parser [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=WARNING handlers=hand1 [logger_parser] level=DEBUG handlers= propagate=1 qualname=compiler.parser [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= """ # config2 has a subtle configuration error that should be reported config2 = config1.replace("sys.stdout", "sys.stbout") # config3 has a less subtle configuration error config3 = config1.replace("formatter=form1", "formatter=misspelled_name") # config4 specifies a custom formatter class to be loaded config4 = """ [loggers] keys=root [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=NOTSET handlers=hand1 [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [formatter_form1] class=""" + __name__ + """.ExceptionFormatter format=%(levelname)s:%(name)s:%(message)s datefmt= """ # config5 specifies a custom handler class to be loaded config5 = config1.replace('class=StreamHandler', 'class=logging.StreamHandler') # config6 uses ', ' delimiters in the handlers and formatters sections config6 = """ [loggers] keys=root,parser [handlers] keys=hand1, hand2 [formatters] keys=form1, form2 [logger_root] level=WARNING handlers= [logger_parser] level=DEBUG handlers=hand1 propagate=1 qualname=compiler.parser [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stdout,) [handler_hand2] class=StreamHandler level=NOTSET formatter=form1 args=(sys.stderr,) [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= [formatter_form2] format=%(message)s datefmt= """ # config7 adds a compiler logger, and uses kwargs instead of args. 
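    # In a handler section, 'kwargs' is evaluated the same way as 'args' and
    # passed as keyword arguments to the handler class constructor.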
config7 = """ [loggers] keys=root,parser,compiler [handlers] keys=hand1 [formatters] keys=form1 [logger_root] level=WARNING handlers=hand1 [logger_compiler] level=DEBUG handlers= propagate=1 qualname=compiler [logger_parser] level=DEBUG handlers= propagate=1 qualname=compiler.parser [handler_hand1] class=StreamHandler level=NOTSET formatter=form1 kwargs={'stream': sys.stdout,} [formatter_form1] format=%(levelname)s ++ %(message)s datefmt= """ # config 8, check for resource warning config8 = r""" [loggers] keys=root [handlers] keys=file [formatters] keys= [logger_root] level=DEBUG handlers=file [handler_file] class=FileHandler level=DEBUG args=("{tempfile}",) """ disable_test = """ [loggers] keys=root [handlers] keys=screen [formatters] keys= [logger_root] level=DEBUG handlers=screen [handler_screen] level=DEBUG class=StreamHandler args=(sys.stdout,) formatter= """ def apply_config(self, conf, **kwargs): file = io.StringIO(textwrap.dedent(conf)) logging.config.fileConfig(file, **kwargs) def test_config0_ok(self): # A simple config file which overrides the default settings. with support.captured_stdout() as output: self.apply_config(self.config0) logger = logging.getLogger() # Won't output anything logger.info(self.next_message()) # Outputs a message logger.error(self.next_message()) self.assert_log_lines([ ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config0_using_cp_ok(self): # A simple config file which overrides the default settings. with support.captured_stdout() as output: file = io.StringIO(textwrap.dedent(self.config0)) cp = configparser.ConfigParser() cp.read_file(file) logging.config.fileConfig(cp) logger = logging.getLogger() # Won't output anything logger.info(self.next_message()) # Outputs a message logger.error(self.next_message()) self.assert_log_lines([ ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config1_ok(self, config=config1): # A config file defining a sub-parser as well. with support.captured_stdout() as output: self.apply_config(config) logger = logging.getLogger("compiler.parser") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config2_failure(self): # A simple config file which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config2) def test_config3_failure(self): # A simple config file which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config3) def test_config4_ok(self): # A config file specifying a custom formatter class. with support.captured_stdout() as output: self.apply_config(self.config4) logger = logging.getLogger() try: raise RuntimeError() except RuntimeError: logging.exception("just testing") sys.stdout.seek(0) self.assertEqual(output.getvalue(), "ERROR:root:just testing\nGot a [RuntimeError]\n") # Original logger output is empty self.assert_log_lines([]) def test_config5_ok(self): self.test_config1_ok(config=self.config5) def test_config6_ok(self): self.test_config1_ok(config=self.config6) def test_config7_ok(self): with support.captured_stdout() as output: self.apply_config(self.config1a) logger = logging.getLogger("compiler.parser") # See issue #11424. 
compiler-hyphenated sorts # between compiler and compiler.xyz and this # was preventing compiler.xyz from being included # in the child loggers of compiler because of an # overzealous loop termination condition. hyphenated = logging.getLogger('compiler-hyphenated') # All will output a message logger.info(self.next_message()) logger.error(self.next_message()) hyphenated.critical(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ('CRITICAL', '3'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) with support.captured_stdout() as output: self.apply_config(self.config7) logger = logging.getLogger("compiler.parser") self.assertFalse(logger.disabled) # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) logger = logging.getLogger("compiler.lexer") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) # Will not appear hyphenated.critical(self.next_message()) self.assert_log_lines([ ('INFO', '4'), ('ERROR', '5'), ('INFO', '6'), ('ERROR', '7'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config8_ok(self): def cleanup(h1, fn): h1.close() os.remove(fn) with self.check_no_resource_warning(): fd, fn = tempfile.mkstemp(".log", "test_logging-X-") os.close(fd) # Replace single backslash with double backslash in windows # to avoid unicode error during string formatting if os.name == "nt": fn = fn.replace("\\", "\\\\") config8 = self.config8.format(tempfile=fn) self.apply_config(config8) self.apply_config(config8) handler = logging.root.handlers[0] self.addCleanup(cleanup, handler, fn) def test_logger_disabling(self): self.apply_config(self.disable_test) logger = logging.getLogger('some_pristine_logger') self.assertFalse(logger.disabled) self.apply_config(self.disable_test) self.assertTrue(logger.disabled) self.apply_config(self.disable_test, disable_existing_loggers=False) self.assertFalse(logger.disabled) def test_config_set_handler_names(self): test_config = """ [loggers] keys=root [handlers] keys=hand1 [formatters] keys=form1 [logger_root] handlers=hand1 [handler_hand1] class=StreamHandler formatter=form1 [formatter_form1] format=%(levelname)s ++ %(message)s """ self.apply_config(test_config) self.assertEqual(logging.getLogger().handlers[0].name, 'hand1') def test_defaults_do_no_interpolation(self): """bpo-33802 defaults should not get interpolated""" ini = textwrap.dedent(""" [formatters] keys=default [formatter_default] [handlers] keys=console [handler_console] class=logging.StreamHandler args=tuple() [loggers] keys=root [logger_root] formatter=default handlers=console """).strip() fd, fn = tempfile.mkstemp(prefix='test_logging_', suffix='.ini') try: os.write(fd, ini.encode('ascii')) os.close(fd) logging.config.fileConfig( fn, defaults=dict( version=1, disable_existing_loggers=False, formatters={ "generic": { "format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s", "datefmt": "[%Y-%m-%d %H:%M:%S %z]", "class": "logging.Formatter" }, }, ) ) finally: os.unlink(fn) class SocketHandlerTest(BaseTest): """Test for SocketHandler objects.""" server_class = TestTCPServer address = ('localhost', 0) def setUp(self): """Set up a TCP server to receive log messages, and a SocketHandler pointing to that server's address and port.""" BaseTest.setUp(self) # Issue #29177: deal with errors that happen during setup self.server = self.sock_hdlr = self.server_exception = None try: self.server = server = 
self.server_class(self.address, self.handle_socket, 0.01) server.start() # Uncomment next line to test error recovery in setUp() # raise OSError('dummy error raised') except OSError as e: self.server_exception = e return server.ready.wait() hcls = logging.handlers.SocketHandler if isinstance(server.server_address, tuple): self.sock_hdlr = hcls('localhost', server.port) else: self.sock_hdlr = hcls(server.server_address, None) self.log_output = '' self.root_logger.removeHandler(self.root_logger.handlers[0]) self.root_logger.addHandler(self.sock_hdlr) self.handled = threading.Semaphore(0) def tearDown(self): """Shutdown the TCP server.""" try: if self.sock_hdlr: self.root_logger.removeHandler(self.sock_hdlr) self.sock_hdlr.close() if self.server: self.server.stop() finally: BaseTest.tearDown(self) def handle_socket(self, request): conn = request.connection while True: chunk = conn.recv(4) if len(chunk) < 4: break slen = struct.unpack(">L", chunk)[0] chunk = conn.recv(slen) while len(chunk) < slen: chunk = chunk + conn.recv(slen - len(chunk)) obj = pickle.loads(chunk) record = logging.makeLogRecord(obj) self.log_output += record.msg + '\n' self.handled.release() def test_output(self): # The log message sent to the SocketHandler is properly received. if self.server_exception: self.skipTest(self.server_exception) logger = logging.getLogger("tcp") logger.error("spam") self.handled.acquire() logger.debug("eggs") self.handled.acquire() self.assertEqual(self.log_output, "spam\neggs\n") def test_noserver(self): if self.server_exception: self.skipTest(self.server_exception) # Avoid timing-related failures due to SocketHandler's own hard-wired # one-second timeout on socket.create_connection() (issue #16264). self.sock_hdlr.retryStart = 2.5 # Kill the server self.server.stop() # The logging call should try to connect, which should fail try: raise RuntimeError('Deliberate mistake') except RuntimeError: self.root_logger.exception('Never sent') self.root_logger.error('Never sent, either') now = time.time() self.assertGreater(self.sock_hdlr.retryTime, now) time.sleep(self.sock_hdlr.retryTime - now + 0.001) self.root_logger.error('Nor this') def _get_temp_domain_socket(): fd, fn = tempfile.mkstemp(prefix='test_logging_', suffix='.sock') os.close(fd) # just need a name - file can't be present, or we'll get an # 'address already in use' error. 
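    # (binding an AF_UNIX socket creates the path itself, so the path must
    # not already exist)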
os.remove(fn) return fn @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") class UnixSocketHandlerTest(SocketHandlerTest): """Test for SocketHandler with unix sockets.""" if hasattr(socket, "AF_UNIX"): server_class = TestUnixStreamServer def setUp(self): # override the definition in the base class self.address = _get_temp_domain_socket() SocketHandlerTest.setUp(self) def tearDown(self): SocketHandlerTest.tearDown(self) support.unlink(self.address) class DatagramHandlerTest(BaseTest): """Test for DatagramHandler.""" server_class = TestUDPServer address = ('localhost', 0) def setUp(self): """Set up a UDP server to receive log messages, and a DatagramHandler pointing to that server's address and port.""" BaseTest.setUp(self) # Issue #29177: deal with errors that happen during setup self.server = self.sock_hdlr = self.server_exception = None try: self.server = server = self.server_class(self.address, self.handle_datagram, 0.01) server.start() # Uncomment next line to test error recovery in setUp() # raise OSError('dummy error raised') except OSError as e: self.server_exception = e return server.ready.wait() hcls = logging.handlers.DatagramHandler if isinstance(server.server_address, tuple): self.sock_hdlr = hcls('localhost', server.port) else: self.sock_hdlr = hcls(server.server_address, None) self.log_output = '' self.root_logger.removeHandler(self.root_logger.handlers[0]) self.root_logger.addHandler(self.sock_hdlr) self.handled = threading.Event() def tearDown(self): """Shutdown the UDP server.""" try: if self.server: self.server.stop() if self.sock_hdlr: self.root_logger.removeHandler(self.sock_hdlr) self.sock_hdlr.close() finally: BaseTest.tearDown(self) def handle_datagram(self, request): slen = struct.pack('>L', 0) # length of prefix packet = request.packet[len(slen):] obj = pickle.loads(packet) record = logging.makeLogRecord(obj) self.log_output += record.msg + '\n' self.handled.set() def test_output(self): # The log message sent to the DatagramHandler is properly received. 
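        # Each datagram carries a pickled LogRecord dict prefixed with a
        # 4-byte length word, which the receiver strips before unpickling.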
if self.server_exception: self.skipTest(self.server_exception) logger = logging.getLogger("udp") logger.error("spam") self.handled.wait() self.handled.clear() logger.error("eggs") self.handled.wait() self.assertEqual(self.log_output, "spam\neggs\n") @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") class UnixDatagramHandlerTest(DatagramHandlerTest): """Test for DatagramHandler using Unix sockets.""" if hasattr(socket, "AF_UNIX"): server_class = TestUnixDatagramServer def setUp(self): # override the definition in the base class self.address = _get_temp_domain_socket() DatagramHandlerTest.setUp(self) def tearDown(self): DatagramHandlerTest.tearDown(self) support.unlink(self.address) class SysLogHandlerTest(BaseTest): """Test for SysLogHandler using UDP.""" server_class = TestUDPServer address = ('localhost', 0) def setUp(self): """Set up a UDP server to receive log messages, and a SysLogHandler pointing to that server's address and port.""" BaseTest.setUp(self) # Issue #29177: deal with errors that happen during setup self.server = self.sl_hdlr = self.server_exception = None try: self.server = server = self.server_class(self.address, self.handle_datagram, 0.01) server.start() # Uncomment next line to test error recovery in setUp() # raise OSError('dummy error raised') except OSError as e: self.server_exception = e return server.ready.wait() hcls = logging.handlers.SysLogHandler if isinstance(server.server_address, tuple): self.sl_hdlr = hcls((server.server_address[0], server.port)) else: self.sl_hdlr = hcls(server.server_address) self.log_output = '' self.root_logger.removeHandler(self.root_logger.handlers[0]) self.root_logger.addHandler(self.sl_hdlr) self.handled = threading.Event() def tearDown(self): """Shutdown the server.""" try: if self.server: self.server.stop() if self.sl_hdlr: self.root_logger.removeHandler(self.sl_hdlr) self.sl_hdlr.close() finally: BaseTest.tearDown(self) def handle_datagram(self, request): self.log_output = request.packet self.handled.set() def test_output(self): if self.server_exception: self.skipTest(self.server_exception) # The log message sent to the SysLogHandler is properly received. 
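        # '<11>' is the syslog PRI field: facility LOG_USER (1) * 8 +
        # severity LOG_ERR (3) = 11.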
logger = logging.getLogger("slh") logger.error("sp\xe4m") self.handled.wait() self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m\x00') self.handled.clear() self.sl_hdlr.append_nul = False logger.error("sp\xe4m") self.handled.wait() self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m') self.handled.clear() self.sl_hdlr.ident = "h\xe4m-" logger.error("sp\xe4m") self.handled.wait() self.assertEqual(self.log_output, b'<11>h\xc3\xa4m-sp\xc3\xa4m') @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") class UnixSysLogHandlerTest(SysLogHandlerTest): """Test for SysLogHandler with Unix sockets.""" if hasattr(socket, "AF_UNIX"): server_class = TestUnixDatagramServer def setUp(self): # override the definition in the base class self.address = _get_temp_domain_socket() SysLogHandlerTest.setUp(self) def tearDown(self): SysLogHandlerTest.tearDown(self) support.unlink(self.address) @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 support required for this test.') class IPv6SysLogHandlerTest(SysLogHandlerTest): """Test for SysLogHandler with IPv6 host.""" server_class = TestUDPServer address = ('::1', 0) def setUp(self): self.server_class.address_family = socket.AF_INET6 super(IPv6SysLogHandlerTest, self).setUp() def tearDown(self): self.server_class.address_family = socket.AF_INET super(IPv6SysLogHandlerTest, self).tearDown() class HTTPHandlerTest(BaseTest): """Test for HTTPHandler.""" def setUp(self): """Set up an HTTP server to receive log messages, and a HTTPHandler pointing to that server's address and port.""" BaseTest.setUp(self) self.handled = threading.Event() def handle_request(self, request): self.command = request.command self.log_data = urlparse(request.path) if self.command == 'POST': try: rlen = int(request.headers['Content-Length']) self.post_data = request.rfile.read(rlen) except: self.post_data = None request.send_response(200) request.end_headers() self.handled.set() def test_output(self): # The log message sent to the HTTPHandler is properly received. 
logger = logging.getLogger("http") root_logger = self.root_logger root_logger.removeHandler(self.root_logger.handlers[0]) for secure in (False, True): addr = ('localhost', 0) if secure: try: import ssl except ImportError: sslctx = None else: here = os.path.dirname(__file__) localhost_cert = os.path.join(here, "keycert.pem") sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) sslctx.load_cert_chain(localhost_cert) context = ssl.create_default_context(cafile=localhost_cert) else: sslctx = None context = None self.server = server = TestHTTPServer(addr, self.handle_request, 0.01, sslctx=sslctx) server.start() server.ready.wait() host = 'localhost:%d' % server.server_port secure_client = secure and sslctx self.h_hdlr = logging.handlers.HTTPHandler(host, '/frob', secure=secure_client, context=context, credentials=('foo', 'bar')) self.log_data = None root_logger.addHandler(self.h_hdlr) for method in ('GET', 'POST'): self.h_hdlr.method = method self.handled.clear() msg = "sp\xe4m" logger.error(msg) self.handled.wait() self.assertEqual(self.log_data.path, '/frob') self.assertEqual(self.command, method) if method == 'GET': d = parse_qs(self.log_data.query) else: d = parse_qs(self.post_data.decode('utf-8')) self.assertEqual(d['name'], ['http']) self.assertEqual(d['funcName'], ['test_output']) self.assertEqual(d['msg'], [msg]) self.server.stop() self.root_logger.removeHandler(self.h_hdlr) self.h_hdlr.close() class MemoryTest(BaseTest): """Test memory persistence of logger objects.""" def setUp(self): """Create a dict to remember potentially destroyed objects.""" BaseTest.setUp(self) self._survivors = {} def _watch_for_survival(self, *args): """Watch the given objects for survival, by creating weakrefs to them.""" for obj in args: key = id(obj), repr(obj) self._survivors[key] = weakref.ref(obj) def _assertTruesurvival(self): """Assert that all objects watched for survival have survived.""" # Trigger cycle breaking. gc.collect() dead = [] for (id_, repr_), ref in self._survivors.items(): if ref() is None: dead.append(repr_) if dead: self.fail("%d objects should have survived " "but have been destroyed: %s" % (len(dead), ", ".join(dead))) def test_persistent_loggers(self): # Logger objects are persistent and retain their configuration, even # if visible references are destroyed. self.root_logger.setLevel(logging.INFO) foo = logging.getLogger("foo") self._watch_for_survival(foo) foo.setLevel(logging.DEBUG) self.root_logger.debug(self.next_message()) foo.debug(self.next_message()) self.assert_log_lines([ ('foo', 'DEBUG', '2'), ]) del foo # foo has survived. self._assertTruesurvival() # foo has retained its settings. bar = logging.getLogger("foo") bar.debug(self.next_message()) self.assert_log_lines([ ('foo', 'DEBUG', '2'), ('foo', 'DEBUG', '3'), ]) class EncodingTest(BaseTest): def test_encoding_plain_file(self): # In Python 2.x, a plain file object is treated as having no encoding. log = logging.getLogger("test") fd, fn = tempfile.mkstemp(".log", "test_logging-1-") os.close(fd) # the non-ascii data we write to the log. data = "foo\x80" try: handler = logging.FileHandler(fn, encoding="utf-8") log.addHandler(handler) try: # write non-ascii data to the log. 
log.warning(data) finally: log.removeHandler(handler) handler.close() # check we wrote exactly those bytes, ignoring trailing \n etc f = open(fn, encoding="utf-8") try: self.assertEqual(f.read().rstrip(), data) finally: f.close() finally: if os.path.isfile(fn): os.remove(fn) def test_encoding_cyrillic_unicode(self): log = logging.getLogger("test") # Get a message in Unicode: Do svidanya in Cyrillic (meaning goodbye) message = '\u0434\u043e \u0441\u0432\u0438\u0434\u0430\u043d\u0438\u044f' # Ensure it's written in a Cyrillic encoding writer_class = codecs.getwriter('cp1251') writer_class.encoding = 'cp1251' stream = io.BytesIO() writer = writer_class(stream, 'strict') handler = logging.StreamHandler(writer) log.addHandler(handler) try: log.warning(message) finally: log.removeHandler(handler) handler.close() # check we wrote exactly those bytes, ignoring trailing \n etc s = stream.getvalue() # Compare against what the data should be when encoded in CP-1251 self.assertEqual(s, b'\xe4\xee \xf1\xe2\xe8\xe4\xe0\xed\xe8\xff\n') class WarningsTest(BaseTest): def test_warnings(self): with warnings.catch_warnings(): logging.captureWarnings(True) self.addCleanup(logging.captureWarnings, False) warnings.filterwarnings("always", category=UserWarning) stream = io.StringIO() h = logging.StreamHandler(stream) logger = logging.getLogger("py.warnings") logger.addHandler(h) warnings.warn("I'm warning you...") logger.removeHandler(h) s = stream.getvalue() h.close() self.assertGreater(s.find("UserWarning: I'm warning you...\n"), 0) # See if an explicit file uses the original implementation a_file = io.StringIO() warnings.showwarning("Explicit", UserWarning, "dummy.py", 42, a_file, "Dummy line") s = a_file.getvalue() a_file.close() self.assertEqual(s, "dummy.py:42: UserWarning: Explicit\n Dummy line\n") def test_warnings_no_handlers(self): with warnings.catch_warnings(): logging.captureWarnings(True) self.addCleanup(logging.captureWarnings, False) # confirm our assumption: no loggers are set logger = logging.getLogger("py.warnings") self.assertEqual(logger.handlers, []) warnings.showwarning("Explicit", UserWarning, "dummy.py", 42) self.assertEqual(len(logger.handlers), 1) self.assertIsInstance(logger.handlers[0], logging.NullHandler) def formatFunc(format, datefmt=None): return logging.Formatter(format, datefmt) class myCustomFormatter: def __init__(self, fmt, datefmt=None): pass def handlerFunc(): return logging.StreamHandler() class CustomHandler(logging.StreamHandler): pass class ConfigDictTest(BaseTest): """Reading logging config from a dictionary.""" check_no_resource_warning = support.check_no_resource_warning expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. config0 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'root' : { 'level' : 'WARNING', 'handlers' : ['hand1'], }, } # config1 adds a little to the standard configuration. config1 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # config1a moves the handler to the root. 
Used with config8a config1a = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', }, }, 'root' : { 'level' : 'WARNING', 'handlers' : ['hand1'], }, } # config2 has a subtle configuration error that should be reported config2 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdbout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # As config1 but with a misspelt level on a handler config2a = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NTOSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # As config1 but with a misspelt level on a logger config2b = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WRANING', }, } # config3 has a less subtle configuration error config3 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'misspelled_name', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # config4 specifies a custom formatter class to be loaded config4 = { 'version': 1, 'formatters': { 'form1' : { '()' : __name__ + '.ExceptionFormatter', 'format' : '%(levelname)s:%(name)s:%(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'root' : { 'level' : 'NOTSET', 'handlers' : ['hand1'], }, } # As config4 but using an actual callable rather than a string config4a = { 'version': 1, 'formatters': { 'form1' : { '()' : ExceptionFormatter, 'format' : '%(levelname)s:%(name)s:%(message)s', }, 'form2' : { '()' : __name__ + '.formatFunc', 'format' : '%(levelname)s:%(name)s:%(message)s', }, 'form3' : { '()' : formatFunc, 'format' : '%(levelname)s:%(name)s:%(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, 'hand2' : { '()' : handlerFunc, }, }, 'root' : { 'level' : 'NOTSET', 'handlers' : ['hand1'], }, } # config5 specifies a custom handler class to be loaded config5 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : __name__ + '.CustomHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # config6 
specifies a custom handler class to be loaded # but has bad arguments config6 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : __name__ + '.CustomHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', '9' : 'invalid parameter name', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # config 7 does not define compiler.parser but defines compiler.lexer # so compiler.parser should be disabled after applying it config7 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.lexer' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # config8 defines both compiler and compiler.lexer # so compiler.parser should not be disabled (since # compiler is defined) config8 = { 'version': 1, 'disable_existing_loggers' : False, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, 'compiler.lexer' : { }, }, 'root' : { 'level' : 'WARNING', }, } # config8a disables existing loggers config8a = { 'version': 1, 'disable_existing_loggers' : True, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, 'compiler.lexer' : { }, }, 'root' : { 'level' : 'WARNING', }, } config9 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'WARNING', 'stream' : 'ext://sys.stdout', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'WARNING', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'NOTSET', }, } config9a = { 'version': 1, 'incremental' : True, 'handlers' : { 'hand1' : { 'level' : 'WARNING', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'INFO', }, }, } config9b = { 'version': 1, 'incremental' : True, 'handlers' : { 'hand1' : { 'level' : 'INFO', }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'INFO', }, }, } # As config1 but with a filter added config10 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'filters' : { 'filt1' : { 'name' : 'compiler.parser', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', 'filters' : ['filt1'], }, }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'filters' : ['filt1'], }, }, 'root' : { 'level' : 'WARNING', 'handlers' : ['hand1'], }, } # As config1 but using cfg:// references config11 = { 'version': 1, 'true_formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handler_configs': { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'formatters' : 'cfg://true_formatters', 'handlers' : { 'hand1' : 
'cfg://handler_configs[hand1]', }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # As config11 but missing the version key config12 = { 'true_formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handler_configs': { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'formatters' : 'cfg://true_formatters', 'handlers' : { 'hand1' : 'cfg://handler_configs[hand1]', }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # As config11 but using an unsupported version config13 = { 'version': 2, 'true_formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handler_configs': { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', }, }, 'formatters' : 'cfg://true_formatters', 'handlers' : { 'hand1' : 'cfg://handler_configs[hand1]', }, 'loggers' : { 'compiler.parser' : { 'level' : 'DEBUG', 'handlers' : ['hand1'], }, }, 'root' : { 'level' : 'WARNING', }, } # As config0, but with properties config14 = { 'version': 1, 'formatters': { 'form1' : { 'format' : '%(levelname)s ++ %(message)s', }, }, 'handlers' : { 'hand1' : { 'class' : 'logging.StreamHandler', 'formatter' : 'form1', 'level' : 'NOTSET', 'stream' : 'ext://sys.stdout', '.': { 'foo': 'bar', 'terminator': '!\n', } }, }, 'root' : { 'level' : 'WARNING', 'handlers' : ['hand1'], }, } out_of_order = { "version": 1, "formatters": { "mySimpleFormatter": { "format": "%(asctime)s (%(name)s) %(levelname)s: %(message)s", "style": "$" } }, "handlers": { "fileGlobal": { "class": "logging.StreamHandler", "level": "DEBUG", "formatter": "mySimpleFormatter" }, "bufferGlobal": { "class": "logging.handlers.MemoryHandler", "capacity": 5, "formatter": "mySimpleFormatter", "target": "fileGlobal", "level": "DEBUG" } }, "loggers": { "mymodule": { "level": "DEBUG", "handlers": ["bufferGlobal"], "propagate": "true" } } } # Configuration with custom logging.Formatter subclass as '()' key and 'validate' set to False custom_formatter_class_validate = { 'version': 1, 'formatters': { 'form1': { '()': __name__ + '.ExceptionFormatter', 'format': '%(levelname)s:%(name)s:%(message)s', 'validate': False, }, }, 'handlers' : { 'hand1' : { 'class': 'logging.StreamHandler', 'formatter': 'form1', 'level': 'NOTSET', 'stream': 'ext://sys.stdout', }, }, "loggers": { "my_test_logger_custom_formatter": { "level": "DEBUG", "handlers": ["hand1"], "propagate": "true" } } } # Configuration with custom logging.Formatter subclass as 'class' key and 'validate' set to False custom_formatter_class_validate2 = { 'version': 1, 'formatters': { 'form1': { 'class': __name__ + '.ExceptionFormatter', 'format': '%(levelname)s:%(name)s:%(message)s', 'validate': False, }, }, 'handlers' : { 'hand1' : { 'class': 'logging.StreamHandler', 'formatter': 'form1', 'level': 'NOTSET', 'stream': 'ext://sys.stdout', }, }, "loggers": { "my_test_logger_custom_formatter": { "level": "DEBUG", "handlers": ["hand1"], "propagate": "true" } } } # Configuration with custom class that is not inherited from logging.Formatter custom_formatter_class_validate3 = { 'version': 1, 'formatters': { 'form1': { 'class': __name__ + '.myCustomFormatter', 'format': '%(levelname)s:%(name)s:%(message)s', 'validate': False, }, }, 'handlers' : { 'hand1' : { 'class': 'logging.StreamHandler', 'formatter': 'form1', 'level': 
'NOTSET', 'stream': 'ext://sys.stdout', }, }, "loggers": { "my_test_logger_custom_formatter": { "level": "DEBUG", "handlers": ["hand1"], "propagate": "true" } } } # Configuration with custom function and 'validate' set to False custom_formatter_with_function = { 'version': 1, 'formatters': { 'form1': { '()': formatFunc, 'format': '%(levelname)s:%(name)s:%(message)s', 'validate': False, }, }, 'handlers' : { 'hand1' : { 'class': 'logging.StreamHandler', 'formatter': 'form1', 'level': 'NOTSET', 'stream': 'ext://sys.stdout', }, }, "loggers": { "my_test_logger_custom_formatter": { "level": "DEBUG", "handlers": ["hand1"], "propagate": "true" } } } def apply_config(self, conf): logging.config.dictConfig(conf) def test_config0_ok(self): # A simple config which overrides the default settings. with support.captured_stdout() as output: self.apply_config(self.config0) logger = logging.getLogger() # Won't output anything logger.info(self.next_message()) # Outputs a message logger.error(self.next_message()) self.assert_log_lines([ ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config1_ok(self, config=config1): # A config defining a sub-parser as well. with support.captured_stdout() as output: self.apply_config(config) logger = logging.getLogger("compiler.parser") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config2_failure(self): # A simple config which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config2) def test_config2a_failure(self): # A simple config which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config2a) def test_config2b_failure(self): # A simple config which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config2b) def test_config3_failure(self): # A simple config which overrides the default settings. self.assertRaises(Exception, self.apply_config, self.config3) def test_config4_ok(self): # A config specifying a custom formatter class. with support.captured_stdout() as output: self.apply_config(self.config4) #logger = logging.getLogger() try: raise RuntimeError() except RuntimeError: logging.exception("just testing") sys.stdout.seek(0) self.assertEqual(output.getvalue(), "ERROR:root:just testing\nGot a [RuntimeError]\n") # Original logger output is empty self.assert_log_lines([]) def test_config4a_ok(self): # A config specifying a custom formatter class. with support.captured_stdout() as output: self.apply_config(self.config4a) #logger = logging.getLogger() try: raise RuntimeError() except RuntimeError: logging.exception("just testing") sys.stdout.seek(0) self.assertEqual(output.getvalue(), "ERROR:root:just testing\nGot a [RuntimeError]\n") # Original logger output is empty self.assert_log_lines([]) def test_config5_ok(self): self.test_config1_ok(config=self.config5) def test_config6_failure(self): self.assertRaises(Exception, self.apply_config, self.config6) def test_config7_ok(self): with support.captured_stdout() as output: self.apply_config(self.config1) logger = logging.getLogger("compiler.parser") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], stream=output) # Original logger output is empty. 
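# Illustrative sketch (not part of the test suite): how the '()' factory key
# exercised by config4/config4a above is resolved by dictConfig.  The factory
# and logger names here are hypothetical, introduced only for this example.
def _demo_factory_formatter():
    import logging
    import logging.config

    def make_formatter(fmt, datefmt=None):
        # dictConfig calls the '()' factory with the remaining keys as kwargs.
        return logging.Formatter(fmt, datefmt)

    logging.config.dictConfig({
        'version': 1,
        'formatters': {
            'form1': {'()': make_formatter,
                      'fmt': '%(levelname)s ++ %(message)s'},
        },
        'handlers': {
            'hand1': {'class': 'logging.StreamHandler',
                      'formatter': 'form1'},
        },
        'root': {'level': 'DEBUG', 'handlers': ['hand1']},
    })
    logging.getLogger('demo').warning('configured via a factory')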
self.assert_log_lines([]) with support.captured_stdout() as output: self.apply_config(self.config7) logger = logging.getLogger("compiler.parser") self.assertTrue(logger.disabled) logger = logging.getLogger("compiler.lexer") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '3'), ('ERROR', '4'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) # Same as test_config_7_ok but don't disable old loggers. def test_config_8_ok(self): with support.captured_stdout() as output: self.apply_config(self.config1) logger = logging.getLogger("compiler.parser") # All will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) with support.captured_stdout() as output: self.apply_config(self.config8) logger = logging.getLogger("compiler.parser") self.assertFalse(logger.disabled) # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) logger = logging.getLogger("compiler.lexer") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '3'), ('ERROR', '4'), ('INFO', '5'), ('ERROR', '6'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_config_8a_ok(self): with support.captured_stdout() as output: self.apply_config(self.config1a) logger = logging.getLogger("compiler.parser") # See issue #11424. compiler-hyphenated sorts # between compiler and compiler.xyz and this # was preventing compiler.xyz from being included # in the child loggers of compiler because of an # overzealous loop termination condition. hyphenated = logging.getLogger('compiler-hyphenated') # All will output a message logger.info(self.next_message()) logger.error(self.next_message()) hyphenated.critical(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ('CRITICAL', '3'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) with support.captured_stdout() as output: self.apply_config(self.config8a) logger = logging.getLogger("compiler.parser") self.assertFalse(logger.disabled) # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) logger = logging.getLogger("compiler.lexer") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) # Will not appear hyphenated.critical(self.next_message()) self.assert_log_lines([ ('INFO', '4'), ('ERROR', '5'), ('INFO', '6'), ('ERROR', '7'), ], stream=output) # Original logger output is empty. 
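# Illustrative sketch of the behaviour the config7/config8/config8a tests
# above verify: loggers that already exist but are not named in a new
# dictConfig are disabled by default; naming an ancestor (config8) or setting
# 'disable_existing_loggers' to False keeps them alive.  The 'demo.parser'
# name below is hypothetical.
def _demo_disable_existing_loggers():
    import logging
    import logging.config

    existing = logging.getLogger('demo.parser')   # created before reconfig
    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,        # keep 'demo.parser' enabled
        'root': {'level': 'WARNING'},
    })
    assert not existing.disabled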
self.assert_log_lines([]) def test_config_9_ok(self): with support.captured_stdout() as output: self.apply_config(self.config9) logger = logging.getLogger("compiler.parser") # Nothing will be output since both handler and logger are set to WARNING logger.info(self.next_message()) self.assert_log_lines([], stream=output) self.apply_config(self.config9a) # Nothing will be output since handler is still set to WARNING logger.info(self.next_message()) self.assert_log_lines([], stream=output) self.apply_config(self.config9b) # Message should now be output logger.info(self.next_message()) self.assert_log_lines([ ('INFO', '3'), ], stream=output) def test_config_10_ok(self): with support.captured_stdout() as output: self.apply_config(self.config10) logger = logging.getLogger("compiler.parser") logger.warning(self.next_message()) logger = logging.getLogger('compiler') # Not output, because filtered logger.warning(self.next_message()) logger = logging.getLogger('compiler.lexer') # Not output, because filtered logger.warning(self.next_message()) logger = logging.getLogger("compiler.parser.codegen") # Output, as not filtered logger.error(self.next_message()) self.assert_log_lines([ ('WARNING', '1'), ('ERROR', '4'), ], stream=output) def test_config11_ok(self): self.test_config1_ok(self.config11) def test_config12_failure(self): self.assertRaises(Exception, self.apply_config, self.config12) def test_config13_failure(self): self.assertRaises(Exception, self.apply_config, self.config13) def test_config14_ok(self): with support.captured_stdout() as output: self.apply_config(self.config14) h = logging._handlers['hand1'] self.assertEqual(h.foo, 'bar') self.assertEqual(h.terminator, '!\n') logging.warning('Exclamation') self.assertTrue(output.getvalue().endswith('Exclamation!\n')) def test_config15_ok(self): def cleanup(h1, fn): h1.close() os.remove(fn) with self.check_no_resource_warning(): fd, fn = tempfile.mkstemp(".log", "test_logging-X-") os.close(fd) config = { "version": 1, "handlers": { "file": { "class": "logging.FileHandler", "filename": fn } }, "root": { "handlers": ["file"] } } self.apply_config(config) self.apply_config(config) handler = logging.root.handlers[0] self.addCleanup(cleanup, handler, fn) def setup_via_listener(self, text, verify=None): text = text.encode("utf-8") # Ask for a randomly assigned port (by using port 0) t = logging.config.listen(0, verify) t.start() t.ready.wait() # Now get the port allocated port = t.port t.ready.clear() try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(2.0) sock.connect(('localhost', port)) slen = struct.pack('>L', len(text)) s = slen + text sentsofar = 0 left = len(s) while left > 0: sent = sock.send(s[sentsofar:]) sentsofar += sent left -= sent sock.close() finally: t.ready.wait(2.0) logging.config.stopListening() support.join_thread(t) def test_listen_config_10_ok(self): with support.captured_stdout() as output: self.setup_via_listener(json.dumps(self.config10)) logger = logging.getLogger("compiler.parser") logger.warning(self.next_message()) logger = logging.getLogger('compiler') # Not output, because filtered logger.warning(self.next_message()) logger = logging.getLogger('compiler.lexer') # Not output, because filtered logger.warning(self.next_message()) logger = logging.getLogger("compiler.parser.codegen") # Output, as not filtered logger.error(self.next_message()) self.assert_log_lines([ ('WARNING', '1'), ('ERROR', '4'), ], stream=output) def test_listen_config_1_ok(self): with support.captured_stdout() as output: 
self.setup_via_listener(textwrap.dedent(ConfigFileTest.config1)) logger = logging.getLogger("compiler.parser") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], stream=output) # Original logger output is empty. self.assert_log_lines([]) def test_listen_verify(self): def verify_fail(stuff): return None def verify_reverse(stuff): return stuff[::-1] logger = logging.getLogger("compiler.parser") to_send = textwrap.dedent(ConfigFileTest.config1) # First, specify a verification function that will fail. # We expect to see no output, since our configuration # never took effect. with support.captured_stdout() as output: self.setup_via_listener(to_send, verify_fail) # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([], stream=output) # Original logger output has the stuff we logged. self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], pat=r"^[\w.]+ -> (\w+): (\d+)$") # Now, perform no verification. Our configuration # should take effect. with support.captured_stdout() as output: self.setup_via_listener(to_send) # no verify callable specified logger = logging.getLogger("compiler.parser") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '3'), ('ERROR', '4'), ], stream=output) # Original logger output still has the stuff we logged before. self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], pat=r"^[\w.]+ -> (\w+): (\d+)$") # Now, perform verification which transforms the bytes. with support.captured_stdout() as output: self.setup_via_listener(to_send[::-1], verify_reverse) logger = logging.getLogger("compiler.parser") # Both will output a message logger.info(self.next_message()) logger.error(self.next_message()) self.assert_log_lines([ ('INFO', '5'), ('ERROR', '6'), ], stream=output) # Original logger output still has the stuff we logged before. 
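# Illustrative sketch of the wire format that setup_via_listener() above
# speaks to logging.config.listen(): a 4-byte big-endian length prefix
# followed by the configuration bytes.  The port and text arguments are
# placeholders supplied by the caller.
def _demo_send_config(port, text):
    import socket
    import struct

    payload = text.encode('utf-8')
    with socket.create_connection(('localhost', port), timeout=2.0) as sock:
        sock.sendall(struct.pack('>L', len(payload)) + payload)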
self.assert_log_lines([ ('INFO', '1'), ('ERROR', '2'), ], pat=r"^[\w.]+ -> (\w+): (\d+)$") def test_out_of_order(self): self.assertRaises(ValueError, self.apply_config, self.out_of_order) def test_out_of_order_with_dollar_style(self): config = copy.deepcopy(self.out_of_order) config['formatters']['mySimpleFormatter']['format'] = "${asctime} (${name}) ${levelname}: ${message}" self.apply_config(config) handler = logging.getLogger('mymodule').handlers[0] self.assertIsInstance(handler.target, logging.Handler) self.assertIsInstance(handler.formatter._style, logging.StringTemplateStyle) def test_custom_formatter_class_with_validate(self): self.apply_config(self.custom_formatter_class_validate) handler = logging.getLogger("my_test_logger_custom_formatter").handlers[0] self.assertIsInstance(handler.formatter, ExceptionFormatter) def test_custom_formatter_class_with_validate2(self): self.apply_config(self.custom_formatter_class_validate2) handler = logging.getLogger("my_test_logger_custom_formatter").handlers[0] self.assertIsInstance(handler.formatter, ExceptionFormatter) def test_custom_formatter_class_with_validate2_with_wrong_fmt(self): config = self.custom_formatter_class_validate.copy() config['formatters']['form1']['style'] = "$" # Exception should not be raise as we have configured 'validate' to False self.apply_config(config) handler = logging.getLogger("my_test_logger_custom_formatter").handlers[0] self.assertIsInstance(handler.formatter, ExceptionFormatter) def test_custom_formatter_class_with_validate3(self): self.assertRaises(ValueError, self.apply_config, self.custom_formatter_class_validate3) def test_custom_formatter_function_with_validate(self): self.assertRaises(ValueError, self.apply_config, self.custom_formatter_with_function) def test_baseconfig(self): d = { 'atuple': (1, 2, 3), 'alist': ['a', 'b', 'c'], 'adict': {'d': 'e', 'f': 3 }, 'nest1': ('g', ('h', 'i'), 'j'), 'nest2': ['k', ['l', 'm'], 'n'], 'nest3': ['o', 'cfg://alist', 'p'], } bc = logging.config.BaseConfigurator(d) self.assertEqual(bc.convert('cfg://atuple[1]'), 2) self.assertEqual(bc.convert('cfg://alist[1]'), 'b') self.assertEqual(bc.convert('cfg://nest1[1][0]'), 'h') self.assertEqual(bc.convert('cfg://nest2[1][1]'), 'm') self.assertEqual(bc.convert('cfg://adict.d'), 'e') self.assertEqual(bc.convert('cfg://adict[f]'), 3) v = bc.convert('cfg://nest3') self.assertEqual(v.pop(1), ['a', 'b', 'c']) self.assertRaises(KeyError, bc.convert, 'cfg://nosuch') self.assertRaises(ValueError, bc.convert, 'cfg://!') self.assertRaises(KeyError, bc.convert, 'cfg://adict[2]') def test_namedtuple(self): # see bpo-39142 from collections import namedtuple class MyHandler(logging.StreamHandler): def __init__(self, resource, *args, **kwargs): super().__init__(*args, **kwargs) self.resource: namedtuple = resource def emit(self, record): record.msg += f' {self.resource.type}' return super().emit(record) Resource = namedtuple('Resource', ['type', 'labels']) resource = Resource(type='my_type', labels=['a']) config = { 'version': 1, 'handlers': { 'myhandler': { '()': MyHandler, 'resource': resource } }, 'root': {'level': 'INFO', 'handlers': ['myhandler']}, } with support.captured_stderr() as stderr: self.apply_config(config) logging.info('some log') self.assertEqual(stderr.getvalue(), 'some log my_type\n') class ManagerTest(BaseTest): def test_manager_loggerclass(self): logged = [] class MyLogger(logging.Logger): def _log(self, level, msg, args, exc_info=None, extra=None): logged.append(msg) man = logging.Manager(None) 
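# Illustrative sketch of the cfg:// convention checked by test_baseconfig
# above: BaseConfigurator.convert() resolves dotted and indexed paths into
# the configuration dictionary.
def _demo_cfg_references():
    import logging.config

    bc = logging.config.BaseConfigurator({'alist': ['a', 'b'],
                                          'adict': {'d': 'e'}})
    assert bc.convert('cfg://alist[1]') == 'b'
    assert bc.convert('cfg://adict.d') == 'e'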
self.assertRaises(TypeError, man.setLoggerClass, int) man.setLoggerClass(MyLogger) logger = man.getLogger('test') logger.warning('should appear in logged') logging.warning('should not appear in logged') self.assertEqual(logged, ['should appear in logged']) def test_set_log_record_factory(self): man = logging.Manager(None) expected = object() man.setLogRecordFactory(expected) self.assertEqual(man.logRecordFactory, expected) class ChildLoggerTest(BaseTest): def test_child_loggers(self): r = logging.getLogger() l1 = logging.getLogger('abc') l2 = logging.getLogger('def.ghi') c1 = r.getChild('xyz') c2 = r.getChild('uvw.xyz') self.assertIs(c1, logging.getLogger('xyz')) self.assertIs(c2, logging.getLogger('uvw.xyz')) c1 = l1.getChild('def') c2 = c1.getChild('ghi') c3 = l1.getChild('def.ghi') self.assertIs(c1, logging.getLogger('abc.def')) self.assertIs(c2, logging.getLogger('abc.def.ghi')) self.assertIs(c2, c3) class DerivedLogRecord(logging.LogRecord): pass class LogRecordFactoryTest(BaseTest): def setUp(self): class CheckingFilter(logging.Filter): def __init__(self, cls): self.cls = cls def filter(self, record): t = type(record) if t is not self.cls: msg = 'Unexpected LogRecord type %s, expected %s' % (t, self.cls) raise TypeError(msg) return True BaseTest.setUp(self) self.filter = CheckingFilter(DerivedLogRecord) self.root_logger.addFilter(self.filter) self.orig_factory = logging.getLogRecordFactory() def tearDown(self): self.root_logger.removeFilter(self.filter) BaseTest.tearDown(self) logging.setLogRecordFactory(self.orig_factory) def test_logrecord_class(self): self.assertRaises(TypeError, self.root_logger.warning, self.next_message()) logging.setLogRecordFactory(DerivedLogRecord) self.root_logger.error(self.next_message()) self.assert_log_lines([ ('root', 'ERROR', '2'), ]) class QueueHandlerTest(BaseTest): # Do not bother with a logger name group. 
expected_log_pat = r"^[\w.]+ -> (\w+): (\d+)$" def setUp(self): BaseTest.setUp(self) self.queue = queue.Queue(-1) self.que_hdlr = logging.handlers.QueueHandler(self.queue) self.name = 'que' self.que_logger = logging.getLogger('que') self.que_logger.propagate = False self.que_logger.setLevel(logging.WARNING) self.que_logger.addHandler(self.que_hdlr) def tearDown(self): self.que_hdlr.close() BaseTest.tearDown(self) def test_queue_handler(self): self.que_logger.debug(self.next_message()) self.assertRaises(queue.Empty, self.queue.get_nowait) self.que_logger.info(self.next_message()) self.assertRaises(queue.Empty, self.queue.get_nowait) msg = self.next_message() self.que_logger.warning(msg) data = self.queue.get_nowait() self.assertTrue(isinstance(data, logging.LogRecord)) self.assertEqual(data.name, self.que_logger.name) self.assertEqual((data.msg, data.args), (msg, None)) def test_formatting(self): msg = self.next_message() levelname = logging.getLevelName(logging.WARNING) log_format_str = '{name} -> {levelname}: {message}' formatted_msg = log_format_str.format(name=self.name, levelname=levelname, message=msg) formatter = logging.Formatter(self.log_format) self.que_hdlr.setFormatter(formatter) self.que_logger.warning(msg) log_record = self.queue.get_nowait() self.assertEqual(formatted_msg, log_record.msg) self.assertEqual(formatted_msg, log_record.message) @unittest.skipUnless(hasattr(logging.handlers, 'QueueListener'), 'logging.handlers.QueueListener required for this test') def test_queue_listener(self): handler = support.TestHandler(support.Matcher()) listener = logging.handlers.QueueListener(self.queue, handler) listener.start() try: self.que_logger.warning(self.next_message()) self.que_logger.error(self.next_message()) self.que_logger.critical(self.next_message()) finally: listener.stop() self.assertTrue(handler.matches(levelno=logging.WARNING, message='1')) self.assertTrue(handler.matches(levelno=logging.ERROR, message='2')) self.assertTrue(handler.matches(levelno=logging.CRITICAL, message='3')) handler.close() # Now test with respect_handler_level set handler = support.TestHandler(support.Matcher()) handler.setLevel(logging.CRITICAL) listener = logging.handlers.QueueListener(self.queue, handler, respect_handler_level=True) listener.start() try: self.que_logger.warning(self.next_message()) self.que_logger.error(self.next_message()) self.que_logger.critical(self.next_message()) finally: listener.stop() self.assertFalse(handler.matches(levelno=logging.WARNING, message='4')) self.assertFalse(handler.matches(levelno=logging.ERROR, message='5')) self.assertTrue(handler.matches(levelno=logging.CRITICAL, message='6')) handler.close() @unittest.skipUnless(hasattr(logging.handlers, 'QueueListener'), 'logging.handlers.QueueListener required for this test') def test_queue_listener_with_StreamHandler(self): # Test that traceback only appends once (bpo-34334). listener = logging.handlers.QueueListener(self.queue, self.root_hdlr) listener.start() try: 1 / 0 except ZeroDivisionError as e: exc = e self.que_logger.exception(self.next_message(), exc_info=exc) listener.stop() self.assertEqual(self.stream.getvalue().strip().count('Traceback'), 1) @unittest.skipUnless(hasattr(logging.handlers, 'QueueListener'), 'logging.handlers.QueueListener required for this test') def test_queue_listener_with_multiple_handlers(self): # Test that queue handler format doesn't affect other handler formats (bpo-35726). 
self.que_hdlr.setFormatter(self.root_formatter) self.que_logger.addHandler(self.root_hdlr) listener = logging.handlers.QueueListener(self.queue, self.que_hdlr) listener.start() self.que_logger.error("error") listener.stop() self.assertEqual(self.stream.getvalue().strip(), "que -> ERROR: error") if hasattr(logging.handlers, 'QueueListener'): import multiprocessing from unittest.mock import patch class QueueListenerTest(BaseTest): """ Tests based on patch submitted for issue #27930. Ensure that QueueListener handles all log messages. """ repeat = 20 @staticmethod def setup_and_log(log_queue, ident): """ Creates a logger with a QueueHandler that logs to a queue read by a QueueListener. Starts the listener, logs five messages, and stops the listener. """ logger = logging.getLogger('test_logger_with_id_%s' % ident) logger.setLevel(logging.DEBUG) handler = logging.handlers.QueueHandler(log_queue) logger.addHandler(handler) listener = logging.handlers.QueueListener(log_queue) listener.start() logger.info('one') logger.info('two') logger.info('three') logger.info('four') logger.info('five') listener.stop() logger.removeHandler(handler) handler.close() @patch.object(logging.handlers.QueueListener, 'handle') def test_handle_called_with_queue_queue(self, mock_handle): for i in range(self.repeat): log_queue = queue.Queue() self.setup_and_log(log_queue, '%s_%s' % (self.id(), i)) self.assertEqual(mock_handle.call_count, 5 * self.repeat, 'correct number of handled log messages') @patch.object(logging.handlers.QueueListener, 'handle') def test_handle_called_with_mp_queue(self, mock_handle): # Issue 28668: The multiprocessing (mp) module is not functional # when the mp.synchronize module cannot be imported. support.import_module('multiprocessing.synchronize') for i in range(self.repeat): log_queue = multiprocessing.Queue() self.setup_and_log(log_queue, '%s_%s' % (self.id(), i)) log_queue.close() log_queue.join_thread() self.assertEqual(mock_handle.call_count, 5 * self.repeat, 'correct number of handled log messages') @staticmethod def get_all_from_queue(log_queue): try: while True: yield log_queue.get_nowait() except queue.Empty: return [] def test_no_messages_in_queue_after_stop(self): """ Five messages are logged then the QueueListener is stopped. This test then gets everything off the queue. Failure of this test indicates that messages were not registered on the queue until _after_ the QueueListener stopped. """ # Issue 28668: The multiprocessing (mp) module is not functional # when the mp.synchronize module cannot be imported. support.import_module('multiprocessing.synchronize') for i in range(self.repeat): queue = multiprocessing.Queue() self.setup_and_log(queue, '%s_%s' %(self.id(), i)) # time.sleep(1) items = list(self.get_all_from_queue(queue)) queue.close() queue.join_thread() expected = [[], [logging.handlers.QueueListener._sentinel]] self.assertIn(items, expected, 'Found unexpected messages in queue: %s' % ( [m.msg if isinstance(m, logging.LogRecord) else m for m in items])) def test_calls_task_done_after_stop(self): # Issue 36813: Make sure queue.join does not deadlock. log_queue = queue.Queue() listener = logging.handlers.QueueListener(log_queue) listener.start() listener.stop() with self.assertRaises(ValueError): # Make sure all tasks are done and .join won't block. 
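# Illustrative sketch of the QueueHandler/QueueListener pairing exercised
# throughout these tests: the handler enqueues records on the producer side,
# and the listener drains them onto real handlers in a background thread.
# The logger name and StreamHandler choice here are illustrative.
def _demo_queue_logging():
    import logging
    import logging.handlers
    import queue

    q = queue.Queue(-1)
    logger = logging.getLogger('demo.queue')
    logger.addHandler(logging.handlers.QueueHandler(q))
    listener = logging.handlers.QueueListener(q, logging.StreamHandler())
    listener.start()
    try:
        logger.warning('handled off the producer thread')
    finally:
        listener.stop()   # flushes remaining records and joins the thread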
                log_queue.task_done()


ZERO = datetime.timedelta(0)

class UTC(datetime.tzinfo):
    def utcoffset(self, dt):
        return ZERO

    dst = utcoffset

    def tzname(self, dt):
        return 'UTC'

utc = UTC()


class FormatterTest(unittest.TestCase):
    def setUp(self):
        self.common = {
            'name': 'formatter.test',
            'level': logging.DEBUG,
            'pathname': os.path.join('path', 'to', 'dummy.ext'),
            'lineno': 42,
            'exc_info': None,
            'func': None,
            'msg': 'Message with %d %s',
            'args': (2, 'placeholders'),
        }
        self.variants = {
        }

    def get_record(self, name=None):
        result = dict(self.common)
        if name is not None:
            result.update(self.variants[name])
        return logging.makeLogRecord(result)

    def assert_error_message(self, exception, message, *args, **kwargs):
        # Note: assertRaises() consumes the expected exception itself, so the
        # except branch below never actually runs; ``message`` documents the
        # expected text rather than being a hard assertion.
        try:
            self.assertRaises(exception, *args, **kwargs)
        except exception as e:
            self.assertEqual(message, e.message)

    def test_percent(self):
        # Test %-formatting
        r = self.get_record()
        f = logging.Formatter('${%(message)s}')
        self.assertEqual(f.format(r), '${Message with 2 placeholders}')
        f = logging.Formatter('%(random)s')
        self.assertRaises(ValueError, f.format, r)
        self.assertFalse(f.usesTime())
        f = logging.Formatter('%(asctime)s')
        self.assertTrue(f.usesTime())
        f = logging.Formatter('%(asctime)-15s')
        self.assertTrue(f.usesTime())
        f = logging.Formatter('%(asctime)#15s')
        self.assertTrue(f.usesTime())

    def test_braces(self):
        # Test {}-formatting
        r = self.get_record()
        f = logging.Formatter('$%{message}%$', style='{')
        self.assertEqual(f.format(r), '$%Message with 2 placeholders%$')
        f = logging.Formatter('{random}', style='{')
        self.assertRaises(ValueError, f.format, r)
        f = logging.Formatter("{message}", style='{')
        self.assertFalse(f.usesTime())
        f = logging.Formatter('{asctime}', style='{')
        self.assertTrue(f.usesTime())
        f = logging.Formatter('{asctime!s:15}', style='{')
        self.assertTrue(f.usesTime())
        f = logging.Formatter('{asctime:15}', style='{')
        self.assertTrue(f.usesTime())

    def test_dollars(self):
        # Test $-formatting
        r = self.get_record()
        f = logging.Formatter('${message}', style='$')
        self.assertEqual(f.format(r), 'Message with 2 placeholders')
        f = logging.Formatter('$message', style='$')
        self.assertEqual(f.format(r), 'Message with 2 placeholders')
        f = logging.Formatter('$$%${message}%$$', style='$')
        self.assertEqual(f.format(r), '$%Message with 2 placeholders%$')
        f = logging.Formatter('${random}', style='$')
        self.assertRaises(ValueError, f.format, r)
        self.assertFalse(f.usesTime())
        f = logging.Formatter('${asctime}', style='$')
        self.assertTrue(f.usesTime())
        f = logging.Formatter('$asctime', style='$')
        self.assertTrue(f.usesTime())
        f = logging.Formatter('${message}', style='$')
        self.assertFalse(f.usesTime())
        f = logging.Formatter('${asctime}--', style='$')
        self.assertTrue(f.usesTime())

    def test_format_validate(self):
        # Check correct formatting
        # Percentage style
        f = logging.Formatter("%(levelname)-15s - %(message) 5s - %(process)03d - %(module) - %(asctime)*.3s")
        self.assertEqual(f._fmt, "%(levelname)-15s - %(message) 5s - %(process)03d - %(module) - %(asctime)*.3s")
        f = logging.Formatter("%(asctime)*s - %(asctime)*.3s - %(process)-34.33o")
        self.assertEqual(f._fmt, "%(asctime)*s - %(asctime)*.3s - %(process)-34.33o")
        f = logging.Formatter("%(process)#+027.23X")
        self.assertEqual(f._fmt, "%(process)#+027.23X")
        f = logging.Formatter("%(foo)#.*g")
        self.assertEqual(f._fmt, "%(foo)#.*g")

        # StrFormat Style
        f = logging.Formatter("$%{message}%$ - {asctime!a:15} - {customfield['key']}", style="{")
        self.assertEqual(f._fmt, "$%{message}%$ - {asctime!a:15} - {customfield['key']}")
        f = logging.Formatter("{process:.2f} - {custom.f:.4f}", style="{")
        self.assertEqual(f._fmt, "{process:.2f} - {custom.f:.4f}")
        f = logging.Formatter("{customfield!s:#<30}", style="{")
        self.assertEqual(f._fmt, "{customfield!s:#<30}")
        f = logging.Formatter("{message!r}", style="{")
        self.assertEqual(f._fmt, "{message!r}")
        f = logging.Formatter("{message!s}", style="{")
        self.assertEqual(f._fmt, "{message!s}")
        f = logging.Formatter("{message!a}", style="{")
        self.assertEqual(f._fmt, "{message!a}")
        f = logging.Formatter("{process!r:4.2}", style="{")
        self.assertEqual(f._fmt, "{process!r:4.2}")
        f = logging.Formatter("{process!s:<#30,.12f}- {custom:=+#30,.1d} - {module:^30}", style="{")
        self.assertEqual(f._fmt, "{process!s:<#30,.12f}- {custom:=+#30,.1d} - {module:^30}")
        f = logging.Formatter("{process!s:{w},.{p}}", style="{")
        self.assertEqual(f._fmt, "{process!s:{w},.{p}}")
        f = logging.Formatter("{foo:12.{p}}", style="{")
        self.assertEqual(f._fmt, "{foo:12.{p}}")
        f = logging.Formatter("{foo:{w}.6}", style="{")
        self.assertEqual(f._fmt, "{foo:{w}.6}")
        f = logging.Formatter("{foo[0].bar[1].baz}", style="{")
        self.assertEqual(f._fmt, "{foo[0].bar[1].baz}")
        f = logging.Formatter("{foo[k1].bar[k2].baz}", style="{")
        self.assertEqual(f._fmt, "{foo[k1].bar[k2].baz}")
        f = logging.Formatter("{12[k1].bar[k2].baz}", style="{")
        self.assertEqual(f._fmt, "{12[k1].bar[k2].baz}")

        # Dollar style
        f = logging.Formatter("${asctime} - $message", style="$")
        self.assertEqual(f._fmt, "${asctime} - $message")
        f = logging.Formatter("$bar $$", style="$")
        self.assertEqual(f._fmt, "$bar $$")
        f = logging.Formatter("$bar $$$$", style="$")
        self.assertEqual(f._fmt, "$bar $$$$")  # this would print two $($$)

        # Testing when ValueError is raised from an incorrect format
        # Percentage Style
        self.assertRaises(ValueError, logging.Formatter, "%(asctime)Z")
        self.assertRaises(ValueError, logging.Formatter, "%(asctime)b")
        self.assertRaises(ValueError, logging.Formatter, "%(asctime)*")
        self.assertRaises(ValueError, logging.Formatter, "%(asctime)*3s")
        self.assertRaises(ValueError, logging.Formatter, "%(asctime)_")
        self.assertRaises(ValueError, logging.Formatter, '{asctime}')
        self.assertRaises(ValueError, logging.Formatter, '${message}')
        self.assertRaises(ValueError, logging.Formatter, '%(foo)#12.3*f')  # with both * and decimal number as precision
        self.assertRaises(ValueError, logging.Formatter, '%(foo)0*.8*f')

        # StrFormat Style
        # Testing failure for '-' in field name
        self.assert_error_message(
            ValueError,
            "invalid field name/expression: 'name-thing'",
            logging.Formatter, "{name-thing}", style="{"
        )
        # Testing failure for style mismatch
        self.assert_error_message(
            ValueError,
            "invalid format: no fields",
            logging.Formatter, '%(asctime)s', style='{'
        )
        # Testing failure for invalid conversion.  The original call here
        # passed no callable and so checked nothing; '{asctime!Z:15}' is an
        # illustrative format added so the assertion actually fires (any
        # conversion character outside 'r', 's' and 'a' is rejected).
        self.assert_error_message(
            ValueError,
            "invalid conversion: 'Z'",
            logging.Formatter, '{asctime!Z:15}', style='{'
        )
        self.assertRaises(ValueError, logging.Formatter, '{asctime!s:#30,15f}', style='{')
        self.assert_error_message(
            ValueError,
            "invalid format: expected ':' after conversion specifier",
            logging.Formatter, '{asctime!aa:15}', style='{'
        )
        # Testing failure for invalid spec
        self.assert_error_message(
            ValueError,
            "bad specifier: '.2ff'",
            logging.Formatter, '{process:.2ff}', style='{'
        )
        self.assertRaises(ValueError, logging.Formatter, '{process:.2Z}', style='{')
        self.assertRaises(ValueError, logging.Formatter, '{process!s:<##30,12g}', style='{')
        self.assertRaises(ValueError, logging.Formatter, '{process!s:<#30#,12g}', style='{')
        self.assertRaises(ValueError, logging.Formatter, '{process!s:{{w}},{{p}}}', style='{')

        # Testing failure for mismatch braces
self.assert_error_message( ValueError, "invalid format: unmatched '{' in format spec", logging.Formatter, '{process', style='{' ) self.assert_error_message( ValueError, "invalid format: unmatched '{' in format spec", logging.Formatter, 'process}', style='{' ) self.assertRaises(ValueError, logging.Formatter, '{{foo!r:4.2}', style='{') self.assertRaises(ValueError, logging.Formatter, '{{foo!r:4.2}}', style='{') self.assertRaises(ValueError, logging.Formatter, '{foo/bar}', style='{') self.assertRaises(ValueError, logging.Formatter, '{foo:{{w}}.{{p}}}}', style='{') self.assertRaises(ValueError, logging.Formatter, '{foo!X:{{w}}.{{p}}}', style='{') self.assertRaises(ValueError, logging.Formatter, '{foo!a:random}', style='{') self.assertRaises(ValueError, logging.Formatter, '{foo!a:ran{dom}', style='{') self.assertRaises(ValueError, logging.Formatter, '{foo!a:ran{d}om}', style='{') self.assertRaises(ValueError, logging.Formatter, '{foo.!a:d}', style='{') # Dollar style # Testing failure for mismatch bare $ self.assert_error_message( ValueError, "invalid format: bare \'$\' not allowed", logging.Formatter, '$bar $$$', style='$' ) self.assert_error_message( ValueError, "invalid format: bare \'$\' not allowed", logging.Formatter, 'bar $', style='$' ) self.assert_error_message( ValueError, "invalid format: bare \'$\' not allowed", logging.Formatter, 'foo $.', style='$' ) # Testing failure for mismatch style self.assert_error_message( ValueError, "invalid format: no fields", logging.Formatter, '{asctime}', style='$' ) self.assertRaises(ValueError, logging.Formatter, '%(asctime)s', style='$') # Testing failure for incorrect fields self.assert_error_message( ValueError, "invalid format: no fields", logging.Formatter, 'foo', style='$' ) self.assertRaises(ValueError, logging.Formatter, '${asctime', style='$') def test_invalid_style(self): self.assertRaises(ValueError, logging.Formatter, None, None, 'x') def test_time(self): r = self.get_record() dt = datetime.datetime(1993, 4, 21, 8, 3, 0, 0, utc) # We use None to indicate we want the local timezone # We're essentially converting a UTC time to local time r.created = time.mktime(dt.astimezone(None).timetuple()) r.msecs = 123 f = logging.Formatter('%(asctime)s %(message)s') f.converter = time.gmtime self.assertEqual(f.formatTime(r), '1993-04-21 08:03:00,123') self.assertEqual(f.formatTime(r, '%Y:%d'), '1993:21') f.format(r) self.assertEqual(r.asctime, '1993-04-21 08:03:00,123') class TestBufferingFormatter(logging.BufferingFormatter): def formatHeader(self, records): return '[(%d)' % len(records) def formatFooter(self, records): return '(%d)]' % len(records) class BufferingFormatterTest(unittest.TestCase): def setUp(self): self.records = [ logging.makeLogRecord({'msg': 'one'}), logging.makeLogRecord({'msg': 'two'}), ] def test_default(self): f = logging.BufferingFormatter() self.assertEqual('', f.format([])) self.assertEqual('onetwo', f.format(self.records)) def test_custom(self): f = TestBufferingFormatter() self.assertEqual('[(2)onetwo(2)]', f.format(self.records)) lf = logging.Formatter('<%(message)s>') f = TestBufferingFormatter(lf) self.assertEqual('[(2)<one><two>(2)]', f.format(self.records)) class ExceptionTest(BaseTest): def test_formatting(self): r = self.root_logger h = RecordingHandler() r.addHandler(h) try: raise RuntimeError('deliberate mistake') except: logging.exception('failed', stack_info=True) r.removeHandler(h) h.close() r = h.records[0] self.assertTrue(r.exc_text.startswith('Traceback (most recent ' 'call last):\n')) 
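# Illustrative sketch of the BufferingFormatter hooks tested above: format()
# wraps the whole batch with formatHeader()/formatFooter() around a
# per-record line formatter.
def _demo_buffering_formatter():
    import logging

    records = [logging.makeLogRecord({'msg': m}) for m in ('one', 'two')]
    bf = logging.BufferingFormatter(logging.Formatter('<%(message)s>'))
    assert bf.format(records) == '<one><two>'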
self.assertTrue(r.exc_text.endswith('\nRuntimeError: ' 'deliberate mistake')) self.assertTrue(r.stack_info.startswith('Stack (most recent ' 'call last):\n')) self.assertTrue(r.stack_info.endswith('logging.exception(\'failed\', ' 'stack_info=True)')) class LastResortTest(BaseTest): def test_last_resort(self): # Test the last resort handler root = self.root_logger root.removeHandler(self.root_hdlr) old_lastresort = logging.lastResort old_raise_exceptions = logging.raiseExceptions try: with support.captured_stderr() as stderr: root.debug('This should not appear') self.assertEqual(stderr.getvalue(), '') root.warning('Final chance!') self.assertEqual(stderr.getvalue(), 'Final chance!\n') # No handlers and no last resort, so 'No handlers' message logging.lastResort = None with support.captured_stderr() as stderr: root.warning('Final chance!') msg = 'No handlers could be found for logger "root"\n' self.assertEqual(stderr.getvalue(), msg) # 'No handlers' message only printed once with support.captured_stderr() as stderr: root.warning('Final chance!') self.assertEqual(stderr.getvalue(), '') # If raiseExceptions is False, no message is printed root.manager.emittedNoHandlerWarning = False logging.raiseExceptions = False with support.captured_stderr() as stderr: root.warning('Final chance!') self.assertEqual(stderr.getvalue(), '') finally: root.addHandler(self.root_hdlr) logging.lastResort = old_lastresort logging.raiseExceptions = old_raise_exceptions class FakeHandler: def __init__(self, identifier, called): for method in ('acquire', 'flush', 'close', 'release'): setattr(self, method, self.record_call(identifier, method, called)) def record_call(self, identifier, method_name, called): def inner(): called.append('{} - {}'.format(identifier, method_name)) return inner class RecordingHandler(logging.NullHandler): def __init__(self, *args, **kwargs): super(RecordingHandler, self).__init__(*args, **kwargs) self.records = [] def handle(self, record): """Keep track of all the emitted records.""" self.records.append(record) class ShutdownTest(BaseTest): """Test suite for the shutdown method.""" def setUp(self): super(ShutdownTest, self).setUp() self.called = [] raise_exceptions = logging.raiseExceptions self.addCleanup(setattr, logging, 'raiseExceptions', raise_exceptions) def raise_error(self, error): def inner(): raise error() return inner def test_no_failure(self): # create some fake handlers handler0 = FakeHandler(0, self.called) handler1 = FakeHandler(1, self.called) handler2 = FakeHandler(2, self.called) # create live weakref to those handlers handlers = map(logging.weakref.ref, [handler0, handler1, handler2]) logging.shutdown(handlerList=list(handlers)) expected = ['2 - acquire', '2 - flush', '2 - close', '2 - release', '1 - acquire', '1 - flush', '1 - close', '1 - release', '0 - acquire', '0 - flush', '0 - close', '0 - release'] self.assertEqual(expected, self.called) def _test_with_failure_in_method(self, method, error): handler = FakeHandler(0, self.called) setattr(handler, method, self.raise_error(error)) handlers = [logging.weakref.ref(handler)] logging.shutdown(handlerList=list(handlers)) self.assertEqual('0 - release', self.called[-1]) def test_with_ioerror_in_acquire(self): self._test_with_failure_in_method('acquire', OSError) def test_with_ioerror_in_flush(self): self._test_with_failure_in_method('flush', OSError) def test_with_ioerror_in_close(self): self._test_with_failure_in_method('close', OSError) def test_with_valueerror_in_acquire(self): self._test_with_failure_in_method('acquire', 
ValueError) def test_with_valueerror_in_flush(self): self._test_with_failure_in_method('flush', ValueError) def test_with_valueerror_in_close(self): self._test_with_failure_in_method('close', ValueError) def test_with_other_error_in_acquire_without_raise(self): logging.raiseExceptions = False self._test_with_failure_in_method('acquire', IndexError) def test_with_other_error_in_flush_without_raise(self): logging.raiseExceptions = False self._test_with_failure_in_method('flush', IndexError) def test_with_other_error_in_close_without_raise(self): logging.raiseExceptions = False self._test_with_failure_in_method('close', IndexError) def test_with_other_error_in_acquire_with_raise(self): logging.raiseExceptions = True self.assertRaises(IndexError, self._test_with_failure_in_method, 'acquire', IndexError) def test_with_other_error_in_flush_with_raise(self): logging.raiseExceptions = True self.assertRaises(IndexError, self._test_with_failure_in_method, 'flush', IndexError) def test_with_other_error_in_close_with_raise(self): logging.raiseExceptions = True self.assertRaises(IndexError, self._test_with_failure_in_method, 'close', IndexError) class ModuleLevelMiscTest(BaseTest): """Test suite for some module level methods.""" def test_disable(self): old_disable = logging.root.manager.disable # confirm our assumptions are correct self.assertEqual(old_disable, 0) self.addCleanup(logging.disable, old_disable) logging.disable(83) self.assertEqual(logging.root.manager.disable, 83) # test the default value introduced in 3.7 # (Issue #28524) logging.disable() self.assertEqual(logging.root.manager.disable, logging.CRITICAL) def _test_log(self, method, level=None): called = [] support.patch(self, logging, 'basicConfig', lambda *a, **kw: called.append((a, kw))) recording = RecordingHandler() logging.root.addHandler(recording) log_method = getattr(logging, method) if level is not None: log_method(level, "test me: %r", recording) else: log_method("test me: %r", recording) self.assertEqual(len(recording.records), 1) record = recording.records[0] self.assertEqual(record.getMessage(), "test me: %r" % recording) expected_level = level if level is not None else getattr(logging, method.upper()) self.assertEqual(record.levelno, expected_level) # basicConfig was not called! 
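# Illustrative sketch of the logging.disable() behaviour that test_disable
# above verifies: it sets a manager-wide threshold below which all loggers
# stay silent, and the bare call (since 3.7) defaults to CRITICAL.
def _demo_disable():
    import logging

    logging.disable(logging.INFO)      # DEBUG and INFO are now suppressed
    assert logging.root.manager.disable == logging.INFO
    logging.disable(logging.NOTSET)    # restore normal filtering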
self.assertEqual(called, []) def test_log(self): self._test_log('log', logging.ERROR) def test_debug(self): self._test_log('debug') def test_info(self): self._test_log('info') def test_warning(self): self._test_log('warning') def test_error(self): self._test_log('error') def test_critical(self): self._test_log('critical') def test_set_logger_class(self): self.assertRaises(TypeError, logging.setLoggerClass, object) class MyLogger(logging.Logger): pass logging.setLoggerClass(MyLogger) self.assertEqual(logging.getLoggerClass(), MyLogger) logging.setLoggerClass(logging.Logger) self.assertEqual(logging.getLoggerClass(), logging.Logger) def test_subclass_logger_cache(self): # bpo-37258 message = [] class MyLogger(logging.getLoggerClass()): def __init__(self, name='MyLogger', level=logging.NOTSET): super().__init__(name, level) message.append('initialized') logging.setLoggerClass(MyLogger) logger = logging.getLogger('just_some_logger') self.assertEqual(message, ['initialized']) stream = io.StringIO() h = logging.StreamHandler(stream) logger.addHandler(h) try: logger.setLevel(logging.DEBUG) logger.debug("hello") self.assertEqual(stream.getvalue().strip(), "hello") stream.truncate(0) stream.seek(0) logger.setLevel(logging.INFO) logger.debug("hello") self.assertEqual(stream.getvalue(), "") finally: logger.removeHandler(h) h.close() logging.setLoggerClass(logging.Logger) def test_logging_at_shutdown(self): # Issue #20037 code = """if 1: import logging class A: def __del__(self): try: raise ValueError("some error") except Exception: logging.exception("exception in __del__") a = A()""" rc, out, err = assert_python_ok("-c", code) err = err.decode() self.assertIn("exception in __del__", err) self.assertIn("ValueError: some error", err) def test_recursion_error(self): # Issue 36272 code = """if 1: import logging def rec(): logging.error("foo") rec() rec()""" rc, out, err = assert_python_failure("-c", code) err = err.decode() self.assertNotIn("Cannot recover from stack overflow.", err) self.assertEqual(rc, 1) class LogRecordTest(BaseTest): def test_str_rep(self): r = logging.makeLogRecord({}) s = str(r) self.assertTrue(s.startswith('<LogRecord: ')) self.assertTrue(s.endswith('>')) def test_dict_arg(self): h = RecordingHandler() r = logging.getLogger() r.addHandler(h) d = {'less' : 'more' } logging.warning('less is %(less)s', d) self.assertIs(h.records[0].args, d) self.assertEqual(h.records[0].message, 'less is more') r.removeHandler(h) h.close() def test_multiprocessing(self): r = logging.makeLogRecord({}) self.assertEqual(r.processName, 'MainProcess') try: import multiprocessing as mp r = logging.makeLogRecord({}) self.assertEqual(r.processName, mp.current_process().name) except ImportError: pass def test_optional(self): r = logging.makeLogRecord({}) NOT_NONE = self.assertIsNotNone NOT_NONE(r.thread) NOT_NONE(r.threadName) NOT_NONE(r.process) NOT_NONE(r.processName) log_threads = logging.logThreads log_processes = logging.logProcesses log_multiprocessing = logging.logMultiprocessing try: logging.logThreads = False logging.logProcesses = False logging.logMultiprocessing = False r = logging.makeLogRecord({}) NONE = self.assertIsNone NONE(r.thread) NONE(r.threadName) NONE(r.process) NONE(r.processName) finally: logging.logThreads = log_threads logging.logProcesses = log_processes logging.logMultiprocessing = log_multiprocessing class BasicConfigTest(unittest.TestCase): """Test suite for logging.basicConfig.""" def setUp(self): super(BasicConfigTest, self).setUp() self.handlers = logging.root.handlers 
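# Illustrative sketch of the basicConfig() contract this class verifies: the
# first call installs a stderr StreamHandler with logging.BASIC_FORMAT, a
# second call is a no-op while handlers exist, and force=True reconfigures.
def _demo_basic_config():
    import logging

    logging.basicConfig(level=logging.INFO)               # configures root
    logging.basicConfig(level=logging.DEBUG)              # no-op: already configured
    logging.basicConfig(level=logging.DEBUG, force=True)  # replaces handlers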
self.saved_handlers = logging._handlers.copy() self.saved_handler_list = logging._handlerList[:] self.original_logging_level = logging.root.level self.addCleanup(self.cleanup) logging.root.handlers = [] def tearDown(self): for h in logging.root.handlers[:]: logging.root.removeHandler(h) h.close() super(BasicConfigTest, self).tearDown() def cleanup(self): setattr(logging.root, 'handlers', self.handlers) logging._handlers.clear() logging._handlers.update(self.saved_handlers) logging._handlerList[:] = self.saved_handler_list logging.root.setLevel(self.original_logging_level) def test_no_kwargs(self): logging.basicConfig() # handler defaults to a StreamHandler to sys.stderr self.assertEqual(len(logging.root.handlers), 1) handler = logging.root.handlers[0] self.assertIsInstance(handler, logging.StreamHandler) self.assertEqual(handler.stream, sys.stderr) formatter = handler.formatter # format defaults to logging.BASIC_FORMAT self.assertEqual(formatter._style._fmt, logging.BASIC_FORMAT) # datefmt defaults to None self.assertIsNone(formatter.datefmt) # style defaults to % self.assertIsInstance(formatter._style, logging.PercentStyle) # level is not explicitly set self.assertEqual(logging.root.level, self.original_logging_level) def test_strformatstyle(self): with support.captured_stdout() as output: logging.basicConfig(stream=sys.stdout, style="{") logging.error("Log an error") sys.stdout.seek(0) self.assertEqual(output.getvalue().strip(), "ERROR:root:Log an error") def test_stringtemplatestyle(self): with support.captured_stdout() as output: logging.basicConfig(stream=sys.stdout, style="$") logging.error("Log an error") sys.stdout.seek(0) self.assertEqual(output.getvalue().strip(), "ERROR:root:Log an error") def test_filename(self): def cleanup(h1, h2, fn): h1.close() h2.close() os.remove(fn) logging.basicConfig(filename='test.log') self.assertEqual(len(logging.root.handlers), 1) handler = logging.root.handlers[0] self.assertIsInstance(handler, logging.FileHandler) expected = logging.FileHandler('test.log', 'a') self.assertEqual(handler.stream.mode, expected.stream.mode) self.assertEqual(handler.stream.name, expected.stream.name) self.addCleanup(cleanup, handler, expected, 'test.log') def test_filemode(self): def cleanup(h1, h2, fn): h1.close() h2.close() os.remove(fn) logging.basicConfig(filename='test.log', filemode='wb') handler = logging.root.handlers[0] expected = logging.FileHandler('test.log', 'wb') self.assertEqual(handler.stream.mode, expected.stream.mode) self.addCleanup(cleanup, handler, expected, 'test.log') def test_stream(self): stream = io.StringIO() self.addCleanup(stream.close) logging.basicConfig(stream=stream) self.assertEqual(len(logging.root.handlers), 1) handler = logging.root.handlers[0] self.assertIsInstance(handler, logging.StreamHandler) self.assertEqual(handler.stream, stream) def test_format(self): logging.basicConfig(format='%(asctime)s - %(message)s') formatter = logging.root.handlers[0].formatter self.assertEqual(formatter._style._fmt, '%(asctime)s - %(message)s') def test_datefmt(self): logging.basicConfig(datefmt='bar') formatter = logging.root.handlers[0].formatter self.assertEqual(formatter.datefmt, 'bar') def test_style(self): logging.basicConfig(style='$') formatter = logging.root.handlers[0].formatter self.assertIsInstance(formatter._style, logging.StringTemplateStyle) def test_level(self): old_level = logging.root.level self.addCleanup(logging.root.setLevel, old_level) logging.basicConfig(level=57) self.assertEqual(logging.root.level, 57) # Test that second 
call has no effect logging.basicConfig(level=58) self.assertEqual(logging.root.level, 57) def test_incompatible(self): assertRaises = self.assertRaises handlers = [logging.StreamHandler()] stream = sys.stderr assertRaises(ValueError, logging.basicConfig, filename='test.log', stream=stream) assertRaises(ValueError, logging.basicConfig, filename='test.log', handlers=handlers) assertRaises(ValueError, logging.basicConfig, stream=stream, handlers=handlers) # Issue 23207: test for invalid kwargs assertRaises(ValueError, logging.basicConfig, loglevel=logging.INFO) # Should pop both filename and filemode even if filename is None logging.basicConfig(filename=None, filemode='a') def test_handlers(self): handlers = [ logging.StreamHandler(), logging.StreamHandler(sys.stdout), logging.StreamHandler(), ] f = logging.Formatter() handlers[2].setFormatter(f) logging.basicConfig(handlers=handlers) self.assertIs(handlers[0], logging.root.handlers[0]) self.assertIs(handlers[1], logging.root.handlers[1]) self.assertIs(handlers[2], logging.root.handlers[2]) self.assertIsNotNone(handlers[0].formatter) self.assertIsNotNone(handlers[1].formatter) self.assertIs(handlers[2].formatter, f) self.assertIs(handlers[0].formatter, handlers[1].formatter) def test_force(self): old_string_io = io.StringIO() new_string_io = io.StringIO() old_handlers = [logging.StreamHandler(old_string_io)] new_handlers = [logging.StreamHandler(new_string_io)] logging.basicConfig(level=logging.WARNING, handlers=old_handlers) logging.warning('warn') logging.info('info') logging.debug('debug') self.assertEqual(len(logging.root.handlers), 1) logging.basicConfig(level=logging.INFO, handlers=new_handlers, force=True) logging.warning('warn') logging.info('info') logging.debug('debug') self.assertEqual(len(logging.root.handlers), 1) self.assertEqual(old_string_io.getvalue().strip(), 'WARNING:root:warn') self.assertEqual(new_string_io.getvalue().strip(), 'WARNING:root:warn\nINFO:root:info') def test_encoding(self): try: encoding = 'utf-8' logging.basicConfig(filename='test.log', encoding=encoding, errors='strict', format='%(message)s', level=logging.DEBUG) self.assertEqual(len(logging.root.handlers), 1) handler = logging.root.handlers[0] self.assertIsInstance(handler, logging.FileHandler) self.assertEqual(handler.encoding, encoding) logging.debug('The Øresund Bridge joins Copenhagen to Malmö') finally: handler.close() with open('test.log', encoding='utf-8') as f: data = f.read().strip() os.remove('test.log') self.assertEqual(data, 'The Øresund Bridge joins Copenhagen to Malmö') def test_encoding_errors(self): try: encoding = 'ascii' logging.basicConfig(filename='test.log', encoding=encoding, errors='ignore', format='%(message)s', level=logging.DEBUG) self.assertEqual(len(logging.root.handlers), 1) handler = logging.root.handlers[0] self.assertIsInstance(handler, logging.FileHandler) self.assertEqual(handler.encoding, encoding) logging.debug('The Øresund Bridge joins Copenhagen to Malmö') finally: handler.close() with open('test.log', encoding='utf-8') as f: data = f.read().strip() os.remove('test.log') self.assertEqual(data, 'The resund Bridge joins Copenhagen to Malm') def test_encoding_errors_default(self): try: encoding = 'ascii' logging.basicConfig(filename='test.log', encoding=encoding, format='%(message)s', level=logging.DEBUG) self.assertEqual(len(logging.root.handlers), 1) handler = logging.root.handlers[0] self.assertIsInstance(handler, logging.FileHandler) self.assertEqual(handler.encoding, encoding) self.assertEqual(handler.errors, 
'backslashreplace') logging.debug('😂: ☃️: The Øresund Bridge joins Copenhagen to Malmö') finally: handler.close() with open('test.log', encoding='utf-8') as f: data = f.read().strip() os.remove('test.log') self.assertEqual(data, r'\U0001f602: \u2603\ufe0f: The \xd8resund ' r'Bridge joins Copenhagen to Malm\xf6') def test_encoding_errors_none(self): # Specifying None should behave as 'strict' try: encoding = 'ascii' logging.basicConfig(filename='test.log', encoding=encoding, errors=None, format='%(message)s', level=logging.DEBUG) self.assertEqual(len(logging.root.handlers), 1) handler = logging.root.handlers[0] self.assertIsInstance(handler, logging.FileHandler) self.assertEqual(handler.encoding, encoding) self.assertIsNone(handler.errors) message = [] def dummy_handle_error(record): _, v, _ = sys.exc_info() message.append(str(v)) handler.handleError = dummy_handle_error logging.debug('The Øresund Bridge joins Copenhagen to Malmö') self.assertTrue(message) self.assertIn("'ascii' codec can't encode " "character '\\xd8' in position 4:", message[0]) finally: handler.close() with open('test.log', encoding='utf-8') as f: data = f.read().strip() os.remove('test.log') # didn't write anything due to the encoding error self.assertEqual(data, r'') def _test_log(self, method, level=None): # logging.root has no handlers so basicConfig should be called called = [] old_basic_config = logging.basicConfig def my_basic_config(*a, **kw): old_basic_config() old_level = logging.root.level logging.root.setLevel(100) # avoid having messages in stderr self.addCleanup(logging.root.setLevel, old_level) called.append((a, kw)) support.patch(self, logging, 'basicConfig', my_basic_config) log_method = getattr(logging, method) if level is not None: log_method(level, "test me") else: log_method("test me") # basicConfig was called with no arguments self.assertEqual(called, [((), {})]) def test_log(self): self._test_log('log', logging.WARNING) def test_debug(self): self._test_log('debug') def test_info(self): self._test_log('info') def test_warning(self): self._test_log('warning') def test_error(self): self._test_log('error') def test_critical(self): self._test_log('critical') class LoggerAdapterTest(unittest.TestCase): def setUp(self): super(LoggerAdapterTest, self).setUp() old_handler_list = logging._handlerList[:] self.recording = RecordingHandler() self.logger = logging.root self.logger.addHandler(self.recording) self.addCleanup(self.logger.removeHandler, self.recording) self.addCleanup(self.recording.close) def cleanup(): logging._handlerList[:] = old_handler_list self.addCleanup(cleanup) self.addCleanup(logging.shutdown) self.adapter = logging.LoggerAdapter(logger=self.logger, extra=None) def test_exception(self): msg = 'testing exception: %r' exc = None try: 1 / 0 except ZeroDivisionError as e: exc = e self.adapter.exception(msg, self.recording) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] self.assertEqual(record.levelno, logging.ERROR) self.assertEqual(record.msg, msg) self.assertEqual(record.args, (self.recording,)) self.assertEqual(record.exc_info, (exc.__class__, exc, exc.__traceback__)) def test_exception_excinfo(self): try: 1 / 0 except ZeroDivisionError as e: exc = e self.adapter.exception('exc_info test', exc_info=exc) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] self.assertEqual(record.exc_info, (exc.__class__, exc, exc.__traceback__)) def test_critical(self): msg = 'critical test! 
%r' self.adapter.critical(msg, self.recording) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] self.assertEqual(record.levelno, logging.CRITICAL) self.assertEqual(record.msg, msg) self.assertEqual(record.args, (self.recording,)) def test_is_enabled_for(self): old_disable = self.adapter.logger.manager.disable self.adapter.logger.manager.disable = 33 self.addCleanup(setattr, self.adapter.logger.manager, 'disable', old_disable) self.assertFalse(self.adapter.isEnabledFor(32)) def test_has_handlers(self): self.assertTrue(self.adapter.hasHandlers()) for handler in self.logger.handlers: self.logger.removeHandler(handler) self.assertFalse(self.logger.hasHandlers()) self.assertFalse(self.adapter.hasHandlers()) def test_nested(self): class Adapter(logging.LoggerAdapter): prefix = 'Adapter' def process(self, msg, kwargs): return f"{self.prefix} {msg}", kwargs msg = 'Adapters can be nested, yo.' adapter = Adapter(logger=self.logger, extra=None) adapter_adapter = Adapter(logger=adapter, extra=None) adapter_adapter.prefix = 'AdapterAdapter' self.assertEqual(repr(adapter), repr(adapter_adapter)) adapter_adapter.log(logging.CRITICAL, msg, self.recording) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] self.assertEqual(record.levelno, logging.CRITICAL) self.assertEqual(record.msg, f"Adapter AdapterAdapter {msg}") self.assertEqual(record.args, (self.recording,)) orig_manager = adapter_adapter.manager self.assertIs(adapter.manager, orig_manager) self.assertIs(self.logger.manager, orig_manager) temp_manager = object() try: adapter_adapter.manager = temp_manager self.assertIs(adapter_adapter.manager, temp_manager) self.assertIs(adapter.manager, temp_manager) self.assertIs(self.logger.manager, temp_manager) finally: adapter_adapter.manager = orig_manager self.assertIs(adapter_adapter.manager, orig_manager) self.assertIs(adapter.manager, orig_manager) self.assertIs(self.logger.manager, orig_manager) class LoggerTest(BaseTest): def setUp(self): super(LoggerTest, self).setUp() self.recording = RecordingHandler() self.logger = logging.Logger(name='blah') self.logger.addHandler(self.recording) self.addCleanup(self.logger.removeHandler, self.recording) self.addCleanup(self.recording.close) self.addCleanup(logging.shutdown) def test_set_invalid_level(self): self.assertRaises(TypeError, self.logger.setLevel, object()) def test_exception(self): msg = 'testing exception: %r' exc = None try: 1 / 0 except ZeroDivisionError as e: exc = e self.logger.exception(msg, self.recording) self.assertEqual(len(self.recording.records), 1) record = self.recording.records[0] self.assertEqual(record.levelno, logging.ERROR) self.assertEqual(record.msg, msg) self.assertEqual(record.args, (self.recording,)) self.assertEqual(record.exc_info, (exc.__class__, exc, exc.__traceback__)) def test_log_invalid_level_with_raise(self): with support.swap_attr(logging, 'raiseExceptions', True): self.assertRaises(TypeError, self.logger.log, '10', 'test message') def test_log_invalid_level_no_raise(self): with support.swap_attr(logging, 'raiseExceptions', False): self.logger.log('10', 'test message') # no exception happens def test_find_caller_with_stack_info(self): called = [] support.patch(self, logging.traceback, 'print_stack', lambda f, file: called.append(file.getvalue())) self.logger.findCaller(stack_info=True) self.assertEqual(len(called), 1) self.assertEqual('Stack (most recent call last):\n', called[0]) def test_find_caller_with_stacklevel(self): the_level = 1 def 
innermost(): self.logger.warning('test', stacklevel=the_level) def inner(): innermost() def outer(): inner() records = self.recording.records outer() self.assertEqual(records[-1].funcName, 'innermost') lineno = records[-1].lineno the_level += 1 outer() self.assertEqual(records[-1].funcName, 'inner') self.assertGreater(records[-1].lineno, lineno) lineno = records[-1].lineno the_level += 1 outer() self.assertEqual(records[-1].funcName, 'outer') self.assertGreater(records[-1].lineno, lineno) lineno = records[-1].lineno the_level += 1 outer() self.assertEqual(records[-1].funcName, 'test_find_caller_with_stacklevel') self.assertGreater(records[-1].lineno, lineno) def test_make_record_with_extra_overwrite(self): name = 'my record' level = 13 fn = lno = msg = args = exc_info = func = sinfo = None rv = logging._logRecordFactory(name, level, fn, lno, msg, args, exc_info, func, sinfo) for key in ('message', 'asctime') + tuple(rv.__dict__.keys()): extra = {key: 'some value'} self.assertRaises(KeyError, self.logger.makeRecord, name, level, fn, lno, msg, args, exc_info, extra=extra, sinfo=sinfo) def test_make_record_with_extra_no_overwrite(self): name = 'my record' level = 13 fn = lno = msg = args = exc_info = func = sinfo = None extra = {'valid_key': 'some value'} result = self.logger.makeRecord(name, level, fn, lno, msg, args, exc_info, extra=extra, sinfo=sinfo) self.assertIn('valid_key', result.__dict__) def test_has_handlers(self): self.assertTrue(self.logger.hasHandlers()) for handler in self.logger.handlers: self.logger.removeHandler(handler) self.assertFalse(self.logger.hasHandlers()) def test_has_handlers_no_propagate(self): child_logger = logging.getLogger('blah.child') child_logger.propagate = False self.assertFalse(child_logger.hasHandlers()) def test_is_enabled_for(self): old_disable = self.logger.manager.disable self.logger.manager.disable = 23 self.addCleanup(setattr, self.logger.manager, 'disable', old_disable) self.assertFalse(self.logger.isEnabledFor(22)) def test_is_enabled_for_disabled_logger(self): old_disabled = self.logger.disabled old_disable = self.logger.manager.disable self.logger.disabled = True self.logger.manager.disable = 21 self.addCleanup(setattr, self.logger, 'disabled', old_disabled) self.addCleanup(setattr, self.logger.manager, 'disable', old_disable) self.assertFalse(self.logger.isEnabledFor(22)) def test_root_logger_aliases(self): root = logging.getLogger() self.assertIs(root, logging.root) self.assertIs(root, logging.getLogger(None)) self.assertIs(root, logging.getLogger('')) self.assertIs(root, logging.getLogger('root')) self.assertIs(root, logging.getLogger('foo').root) self.assertIs(root, logging.getLogger('foo.bar').root) self.assertIs(root, logging.getLogger('foo').parent) self.assertIsNot(root, logging.getLogger('\0')) self.assertIsNot(root, logging.getLogger('foo.bar').parent) def test_invalid_names(self): self.assertRaises(TypeError, logging.getLogger, any) self.assertRaises(TypeError, logging.getLogger, b'foo') def test_pickling(self): for proto in range(pickle.HIGHEST_PROTOCOL + 1): for name in ('', 'root', 'foo', 'foo.bar', 'baz.bar'): logger = logging.getLogger(name) s = pickle.dumps(logger, proto) unpickled = pickle.loads(s) self.assertIs(unpickled, logger) def test_caching(self): root = self.root_logger logger1 = logging.getLogger("abc") logger2 = logging.getLogger("abc.def") # Set root logger level and ensure cache is empty root.setLevel(logging.ERROR) self.assertEqual(logger2.getEffectiveLevel(), logging.ERROR) self.assertEqual(logger2._cache, {}) # 
Ensure cache is populated and calls are consistent self.assertTrue(logger2.isEnabledFor(logging.ERROR)) self.assertFalse(logger2.isEnabledFor(logging.DEBUG)) self.assertEqual(logger2._cache, {logging.ERROR: True, logging.DEBUG: False}) self.assertEqual(root._cache, {}) self.assertTrue(logger2.isEnabledFor(logging.ERROR)) # Ensure root cache gets populated self.assertEqual(root._cache, {}) self.assertTrue(root.isEnabledFor(logging.ERROR)) self.assertEqual(root._cache, {logging.ERROR: True}) # Set parent logger level and ensure caches are emptied logger1.setLevel(logging.CRITICAL) self.assertEqual(logger2.getEffectiveLevel(), logging.CRITICAL) self.assertEqual(logger2._cache, {}) # Ensure logger2 uses parent logger's effective level self.assertFalse(logger2.isEnabledFor(logging.ERROR)) # Set level to NOTSET and ensure caches are empty logger2.setLevel(logging.NOTSET) self.assertEqual(logger2.getEffectiveLevel(), logging.CRITICAL) self.assertEqual(logger2._cache, {}) self.assertEqual(logger1._cache, {}) self.assertEqual(root._cache, {}) # Verify logger2 follows parent and not root self.assertFalse(logger2.isEnabledFor(logging.ERROR)) self.assertTrue(logger2.isEnabledFor(logging.CRITICAL)) self.assertFalse(logger1.isEnabledFor(logging.ERROR)) self.assertTrue(logger1.isEnabledFor(logging.CRITICAL)) self.assertTrue(root.isEnabledFor(logging.ERROR)) # Disable logging in manager and ensure caches are clear logging.disable() self.assertEqual(logger2.getEffectiveLevel(), logging.CRITICAL) self.assertEqual(logger2._cache, {}) self.assertEqual(logger1._cache, {}) self.assertEqual(root._cache, {}) # Ensure no loggers are enabled self.assertFalse(logger1.isEnabledFor(logging.CRITICAL)) self.assertFalse(logger2.isEnabledFor(logging.CRITICAL)) self.assertFalse(root.isEnabledFor(logging.CRITICAL)) class BaseFileTest(BaseTest): "Base class for handler tests that write log files" def setUp(self): BaseTest.setUp(self) fd, self.fn = tempfile.mkstemp(".log", "test_logging-2-") os.close(fd) self.rmfiles = [] def tearDown(self): for fn in self.rmfiles: os.unlink(fn) if os.path.exists(self.fn): os.unlink(self.fn) BaseTest.tearDown(self) def assertLogFile(self, filename): "Assert a log file is there and register it for deletion" self.assertTrue(os.path.exists(filename), msg="Log file %r does not exist" % filename) self.rmfiles.append(filename) class FileHandlerTest(BaseFileTest): def test_delay(self): os.unlink(self.fn) fh = logging.FileHandler(self.fn, delay=True) self.assertIsNone(fh.stream) self.assertFalse(os.path.exists(self.fn)) fh.handle(logging.makeLogRecord({})) self.assertIsNotNone(fh.stream) self.assertTrue(os.path.exists(self.fn)) fh.close() class RotatingFileHandlerTest(BaseFileTest): def next_rec(self): return logging.LogRecord('n', logging.DEBUG, 'p', 1, self.next_message(), None, None, None) def test_should_not_rollover(self): # If maxbytes is zero rollover never occurs rh = logging.handlers.RotatingFileHandler(self.fn, maxBytes=0) self.assertFalse(rh.shouldRollover(None)) rh.close() def test_should_rollover(self): rh = logging.handlers.RotatingFileHandler(self.fn, maxBytes=1) self.assertTrue(rh.shouldRollover(self.next_rec())) rh.close() def test_file_created(self): # checks that the file is created and assumes it was created # by us rh = logging.handlers.RotatingFileHandler(self.fn) rh.emit(self.next_rec()) self.assertLogFile(self.fn) rh.close() def test_rollover_filenames(self): def namer(name): return name + ".test" rh = logging.handlers.RotatingFileHandler( self.fn, backupCount=2, maxBytes=1) 
rh.namer = namer rh.emit(self.next_rec()) self.assertLogFile(self.fn) rh.emit(self.next_rec()) self.assertLogFile(namer(self.fn + ".1")) rh.emit(self.next_rec()) self.assertLogFile(namer(self.fn + ".2")) self.assertFalse(os.path.exists(namer(self.fn + ".3"))) rh.close() def test_namer_rotator_inheritance(self): class HandlerWithNamerAndRotator(logging.handlers.RotatingFileHandler): def namer(self, name): return name + ".test" def rotator(self, source, dest): if os.path.exists(source): os.rename(source, dest + ".rotated") rh = HandlerWithNamerAndRotator( self.fn, backupCount=2, maxBytes=1) self.assertEqual(rh.namer(self.fn), self.fn + ".test") rh.emit(self.next_rec()) self.assertLogFile(self.fn) rh.emit(self.next_rec()) self.assertLogFile(rh.namer(self.fn + ".1") + ".rotated") self.assertFalse(os.path.exists(rh.namer(self.fn + ".1"))) rh.close() @support.requires_zlib def test_rotator(self): def namer(name): return name + ".gz" def rotator(source, dest): with open(source, "rb") as sf: data = sf.read() compressed = zlib.compress(data, 9) with open(dest, "wb") as df: df.write(compressed) os.remove(source) rh = logging.handlers.RotatingFileHandler( self.fn, backupCount=2, maxBytes=1) rh.rotator = rotator rh.namer = namer m1 = self.next_rec() rh.emit(m1) self.assertLogFile(self.fn) m2 = self.next_rec() rh.emit(m2) fn = namer(self.fn + ".1") self.assertLogFile(fn) newline = os.linesep with open(fn, "rb") as f: compressed = f.read() data = zlib.decompress(compressed) self.assertEqual(data.decode("ascii"), m1.msg + newline) rh.emit(self.next_rec()) fn = namer(self.fn + ".2") self.assertLogFile(fn) with open(fn, "rb") as f: compressed = f.read() data = zlib.decompress(compressed) self.assertEqual(data.decode("ascii"), m1.msg + newline) rh.emit(self.next_rec()) fn = namer(self.fn + ".2") with open(fn, "rb") as f: compressed = f.read() data = zlib.decompress(compressed) self.assertEqual(data.decode("ascii"), m2.msg + newline) self.assertFalse(os.path.exists(namer(self.fn + ".3"))) rh.close() class TimedRotatingFileHandlerTest(BaseFileTest): # other test methods added below def test_rollover(self): fh = logging.handlers.TimedRotatingFileHandler(self.fn, 'S', backupCount=1) fmt = logging.Formatter('%(asctime)s %(message)s') fh.setFormatter(fmt) r1 = logging.makeLogRecord({'msg': 'testing - initial'}) fh.emit(r1) self.assertLogFile(self.fn) time.sleep(1.1) # a little over a second ... r2 = logging.makeLogRecord({'msg': 'testing - after delay'}) fh.emit(r2) fh.close() # At this point, we should have a recent rotated file which we # can test for the existence of. However, in practice, on some # machines which run really slowly, we don't know how far back # in time to go to look for the log file. So, we go back a fair # bit, and stop as soon as we see a rotated file. In theory this # could of course still fail, but the chances are lower. 
found = False now = datetime.datetime.now() GO_BACK = 5 * 60 # seconds for secs in range(GO_BACK): prev = now - datetime.timedelta(seconds=secs) fn = self.fn + prev.strftime(".%Y-%m-%d_%H-%M-%S") found = os.path.exists(fn) if found: self.rmfiles.append(fn) break msg = 'No rotated files found, went back %d seconds' % GO_BACK if not found: # print additional diagnostics dn, fn = os.path.split(self.fn) files = [f for f in os.listdir(dn) if f.startswith(fn)] print('Test time: %s' % now.strftime("%Y-%m-%d %H-%M-%S"), file=sys.stderr) print('The only matching files are: %s' % files, file=sys.stderr) for f in files: print('Contents of %s:' % f) path = os.path.join(dn, f) with open(path, 'r') as tf: print(tf.read()) self.assertTrue(found, msg=msg) def test_invalid(self): assertRaises = self.assertRaises assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler, self.fn, 'X', delay=True) assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler, self.fn, 'W', delay=True) assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler, self.fn, 'W7', delay=True) def test_compute_rollover_daily_attime(self): currentTime = 0 atTime = datetime.time(12, 0, 0) rh = logging.handlers.TimedRotatingFileHandler( self.fn, when='MIDNIGHT', interval=1, backupCount=0, utc=True, atTime=atTime) try: actual = rh.computeRollover(currentTime) self.assertEqual(actual, currentTime + 12 * 60 * 60) actual = rh.computeRollover(currentTime + 13 * 60 * 60) self.assertEqual(actual, currentTime + 36 * 60 * 60) finally: rh.close() #@unittest.skipIf(True, 'Temporarily skipped while failures investigated.') def test_compute_rollover_weekly_attime(self): currentTime = int(time.time()) today = currentTime - currentTime % 86400 atTime = datetime.time(12, 0, 0) wday = time.gmtime(today).tm_wday for day in range(7): rh = logging.handlers.TimedRotatingFileHandler( self.fn, when='W%d' % day, interval=1, backupCount=0, utc=True, atTime=atTime) try: if wday > day: # The rollover day has already passed this week, so we # go over into next week expected = (7 - wday + day) else: expected = (day - wday) # At this point expected is in days from now, convert to seconds expected *= 24 * 60 * 60 # Add in the rollover time expected += 12 * 60 * 60 # Add in adjustment for today expected += today actual = rh.computeRollover(today) if actual != expected: print('failed in timezone: %d' % time.timezone) print('local vars: %s' % locals()) self.assertEqual(actual, expected) if day == wday: # goes into following week expected += 7 * 24 * 60 * 60 actual = rh.computeRollover(today + 13 * 60 * 60) if actual != expected: print('failed in timezone: %d' % time.timezone) print('local vars: %s' % locals()) self.assertEqual(actual, expected) finally: rh.close() def secs(**kw): return datetime.timedelta(**kw) // datetime.timedelta(seconds=1) for when, exp in (('S', 1), ('M', 60), ('H', 60 * 60), ('D', 60 * 60 * 24), ('MIDNIGHT', 60 * 60 * 24), # current time (epoch start) is a Thursday, W0 means Monday ('W0', secs(days=4, hours=24)), ): def test_compute_rollover(self, when=when, exp=exp): rh = logging.handlers.TimedRotatingFileHandler( self.fn, when=when, interval=1, backupCount=0, utc=True) currentTime = 0.0 actual = rh.computeRollover(currentTime) if exp != actual: # Failures occur on some systems for MIDNIGHT and W0. 
# Print detailed calculation for MIDNIGHT so we can try to see # what's going on if when == 'MIDNIGHT': try: if rh.utc: t = time.gmtime(currentTime) else: t = time.localtime(currentTime) currentHour = t[3] currentMinute = t[4] currentSecond = t[5] # r is the number of seconds left between now and midnight r = logging.handlers._MIDNIGHT - ((currentHour * 60 + currentMinute) * 60 + currentSecond) result = currentTime + r print('t: %s (%s)' % (t, rh.utc), file=sys.stderr) print('currentHour: %s' % currentHour, file=sys.stderr) print('currentMinute: %s' % currentMinute, file=sys.stderr) print('currentSecond: %s' % currentSecond, file=sys.stderr) print('r: %s' % r, file=sys.stderr) print('result: %s' % result, file=sys.stderr) except Exception: print('exception in diagnostic code: %s' % sys.exc_info()[1], file=sys.stderr) self.assertEqual(exp, actual) rh.close() setattr(TimedRotatingFileHandlerTest, "test_compute_rollover_%s" % when, test_compute_rollover) @unittest.skipUnless(win32evtlog, 'win32evtlog/win32evtlogutil/pywintypes required for this test.') class NTEventLogHandlerTest(BaseTest): def test_basic(self): logtype = 'Application' elh = win32evtlog.OpenEventLog(None, logtype) num_recs = win32evtlog.GetNumberOfEventLogRecords(elh) try: h = logging.handlers.NTEventLogHandler('test_logging') except pywintypes.error as e: if e.winerror == 5: # access denied raise unittest.SkipTest('Insufficient privileges to run test') raise r = logging.makeLogRecord({'msg': 'Test Log Message'}) h.handle(r) h.close() # Now see if the event is recorded self.assertLess(num_recs, win32evtlog.GetNumberOfEventLogRecords(elh)) flags = win32evtlog.EVENTLOG_BACKWARDS_READ | \ win32evtlog.EVENTLOG_SEQUENTIAL_READ found = False GO_BACK = 100 events = win32evtlog.ReadEventLog(elh, flags, GO_BACK) for e in events: if e.SourceName != 'test_logging': continue msg = win32evtlogutil.SafeFormatMessage(e, logtype) if msg != 'Test Log Message\r\n': continue found = True break msg = 'Record not found in event log, went back %d records' % GO_BACK self.assertTrue(found, msg=msg) class MiscTestCase(unittest.TestCase): def test__all__(self): blacklist = {'logThreads', 'logMultiprocessing', 'logProcesses', 'currentframe', 'PercentStyle', 'StrFormatStyle', 'StringTemplateStyle', 'Filterer', 'PlaceHolder', 'Manager', 'RootLogger', 'root', 'threading'} support.check__all__(self, logging, blacklist=blacklist) # Set the locale to the platform-dependent default. I have no idea # why the test does this, but in any case we save the current locale # first and restore it at the end. @support.run_with_locale('LC_ALL', '') def test_main(): tests = [ BuiltinLevelsTest, BasicFilterTest, CustomLevelsAndFiltersTest, HandlerTest, MemoryHandlerTest, ConfigFileTest, SocketHandlerTest, DatagramHandlerTest, MemoryTest, EncodingTest, WarningsTest, ConfigDictTest, ManagerTest, FormatterTest, BufferingFormatterTest, StreamHandlerTest, LogRecordFactoryTest, ChildLoggerTest, QueueHandlerTest, ShutdownTest, ModuleLevelMiscTest, BasicConfigTest, LoggerAdapterTest, LoggerTest, SMTPHandlerTest, FileHandlerTest, RotatingFileHandlerTest, LastResortTest, LogRecordTest, ExceptionTest, SysLogHandlerTest, IPv6SysLogHandlerTest, HTTPHandlerTest, NTEventLogHandlerTest, TimedRotatingFileHandlerTest, UnixSocketHandlerTest, UnixDatagramHandlerTest, UnixSysLogHandlerTest, MiscTestCase ] if hasattr(logging.handlers, 'QueueListener'): tests.append(QueueListenerTest) support.run_unittest(*tests) if __name__ == "__main__": test_main()
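basic_config_force_demo.py
# Added illustrative sketch (filename hypothetical, not part of the original
# test suite): the behaviour exercised by BasicConfigTest.test_force above.
# A repeated basicConfig() call is a no-op once the root logger has handlers,
# unless force=True tells it to replace them (Python 3.8+).
import io
import logging

first = io.StringIO()
logging.basicConfig(level=logging.WARNING, stream=first)
logging.basicConfig(level=logging.INFO)        # ignored: root is already configured
logging.info('dropped')                        # still below the effective WARNING level

second = io.StringIO()
logging.basicConfig(level=logging.INFO, stream=second, force=True)
logging.info('kept')                           # routed to the replacement handler

print(repr(first.getvalue()))                  # ''
print(repr(second.getvalue()))                 # 'INFO:root:kept\n'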
example3.py
# ch15/example3.py
import time
import threading
from multiprocessing import Pool

COUNT = 50000000

def countdown(n):
    while n > 0:
        n -= 1

if __name__ == '__main__':
    #######################################################################
    # Sequential
    start = time.time()
    countdown(COUNT)
    print('Sequential program finished.')
    print(f'Took {time.time() - start : .2f} seconds.')
    print()

    #######################################################################
    # Multithreading
    thread1 = threading.Thread(target=countdown, args=(COUNT // 2,))
    thread2 = threading.Thread(target=countdown, args=(COUNT // 2,))

    start = time.time()
    thread1.start()
    thread2.start()
    thread1.join()
    thread2.join()
    print('Multithreading program finished.')
    print(f'Took {time.time() - start : .2f} seconds.')
    print()

    #######################################################################
    # Multiprocessing
    pool = Pool(processes=2)
    start = time.time()
    pool.apply_async(countdown, args=(COUNT//2,))
    pool.apply_async(countdown, args=(COUNT//2,))
    pool.close()
    pool.join()
    print('Multiprocessing program finished.')
    print(f'Took {time.time() - start : .2f} seconds.')
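example3_futures.py
# Added illustrative sketch (filename hypothetical, not part of the original
# dataset): the multiprocessing section of example3.py rewritten with
# concurrent.futures, which creates, closes and joins the worker pool
# automatically.
import time
from concurrent.futures import ProcessPoolExecutor

COUNT = 50000000

def countdown(n):
    while n > 0:
        n -= 1

if __name__ == '__main__':
    start = time.time()
    with ProcessPoolExecutor(max_workers=2) as executor:
        # submit() returns futures; leaving the with-block waits for them all
        futures = [executor.submit(countdown, COUNT // 2) for _ in range(2)]
    print(f'Took {time.time() - start:.2f} seconds.')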
SparkMQTTStage1_1.py
########################################################################
# This is the implementation for Cloud + Edge = iotx Stage 1. Cloud is
# represented by Apache Spark and the Edge computing framework is Calvin.
# Apache Spark receives temperature data from Calvin via MQTT (pub/sub
# model). This program calculates a running average using the windowing
# and sliding-interval technique and sends the result back to Calvin via
# MQTT. It makes use of the Spark MQTT assembly package to connect to the
# MQTT broker to collect data, and the Paho MQTT client package to
# publish data.
#
# iotx stage 1 demo
#
# Author: Aarti Gorade
# Email: ahg1512@rit.edu
#
# Invocation:
#
# Docker image: aarti/sparkstage1-iotx
# Docker file: DockerfileSparkMQTTStage1
#
# OR
#
# Command line:
# ./sbin/start-master.sh
# ./bin/spark-class org.apache.spark.deploy.worker.Worker spark://<Spark
#     Master's IP address>:<Spark Master's Port>
# ./bin/spark-submit
#     --packages org.apache.spark:spark-streaming-mqtt-assembly_2.11:1.5.0
#     python/SparkMQTTStage1.py
#
########################################################################

import os
import socket
from collections import deque
from threading import Thread
from time import sleep

import paho.mqtt.client as mqtt
from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from pyspark.streaming.mqtt import MQTTUtils

# MQTT client
mqttc = None

# Queue to store calculated average values
queue = deque([])

# Spark broker details
sparkBroker = "iot.eclipse.org"
sparkPort = 1883
sparkTopic = "edu/rit/iotx/cloud/average/temperature"

# Calvin broker URI
brokerUrl = "tcp://iot.eclipse.org:1883"

# Topic pattern where temperature data is being sent
topic = "edu/rit/iotx/+/temperature"

# counters to keep track of running sum and count to calculate average value
sumAccum = 0
countAccum = 0

# window and sliding interval used for calculating the average over each
# window of the incoming Spark stream
windowInterval = 30
slidingInterval = 15


def getHostIpAddress():
    """
    Get the globally visible IP address of the current machine

    :return: IP address
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.connect(("8.8.8.8", 80))
    ip = s.getsockname()[0]
    s.close()
    return ip


# IP address and port number for the Spark cluster
hostAddress = getHostIpAddress()
hostPort = "7077"


def connectToBroker(broker, port):
    """
    Create the MQTT client and connect to the given broker server
    on the desired port

    :param broker: broker server
    :param port: port to connect to
    :return: None
    """
    global mqttc
    mqttc = mqtt.Client()
    print("Trying to connect to broker...")
    mqttc.connect(broker, port)
    print("Successfully connected!!!")


def addToQueue(rdd):
    """
    Add calculated average values into the queue

    :param rdd: RDD containing calculated average values
    :return: None
    """
    rddList = rdd.collect()
    subList = [float(x[0]) for x in rddList]
    global queue
    queue.extend(subList)


def publishFromQueue():
    """
    Fetch data from the queue and publish it using MQTT

    :return: None
    """
    global mqttc
    global queue
    mqttClient = mqttc
    while True:
        while not queue:
            sleep(slidingInterval)
        data = queue.popleft()
        print(data)
        mqttClient.publish(sparkTopic, data)


def update(x):
    """
    Add the incoming new item in the current sliding window interval
    into the sum

    :param x: new value
    :return: current average value
    """
    global sumAccum
    global countAccum
    sumAccum += x
    countAccum += 1
    return sumAccum / countAccum


def reverseUpdate(x):
    """
    Remove an item from the old sliding window interval from the
    current sum

    :param x: old item from the last window interval
    :return: current average value
    """
    global sumAccum
    global countAccum
    sumAccum -= x
    countAccum -= 1
    return sumAccum / countAccum


if __name__ == "__main__":
    """
    Main entry point: calculates the average of the input data stream per
    window and publishes the calculated average values for the Calvin
    client to perform further processing using sensors or actuators
    """

    # Load the Spark Streaming MQTT package at runtime
    SUBMIT_ARGS = "--packages " \
                  "org.apache.spark:spark-streaming-mqtt-assembly_2.11:1.5.0 " \
                  "pyspark-shell"
    os.environ["PYSPARK_SUBMIT_ARGS"] = SUBMIT_ARGS

    # connect to Spark cluster "spark://cluster-host:port"
    sc = SparkContext("spark://" + hostAddress + ":" + hostPort,
                      appName="iotx")
    sc.setLogLevel("ERROR")

    print("Created Streaming context...")
    ssc = StreamingContext(sc, 15)

    # mandatory to store checkpointed data for Spark Streaming
    ssc.checkpoint("../tmp/SparkCheckpointedData")

    print("Creating MQTT stream...")
    mqttStream = MQTTUtils.createStream(ssc, brokerUrl, topic)

    # split incoming stream based on space
    celsiusTemp = mqttStream.map(lambda line: line.split(" "))

    # Convert Celsius to Fahrenheit and store each value in pair format
    fahrenheitTemp = celsiusTemp.map(
        lambda temp: (str((float(temp[0]) * 9 / 5) + 32), 1))

    # lambda functions to calculate average using the windowing technique
    update_1 = lambda x, y: update(x)
    reverseUpdate_1 = lambda x, y: reverseUpdate(x)

    # Reduce last 30 seconds of data, every 15 seconds
    windowedWordCounts = fahrenheitTemp.reduceByKeyAndWindow(
        update_1, reverseUpdate_1, windowInterval, slidingInterval)

    # connect to broker
    connectToBroker(sparkBroker, sparkPort)

    # foreachRDD is an action: add each RDD containing average values
    # into the queue
    windowedWordCounts.foreachRDD(addToQueue)

    # create a worker thread to fetch data from the queue and publish it
    # to the broker using MQTT
    worker = Thread(target=publishFromQueue)
    worker.setDaemon(True)
    worker.start()

    # Start the Spark streaming jobs
    print("\n\n SSC starting ...")
    ssc.start()
    print("\n\n SSC waiting for termination...")
    # block until the streaming computation is stopped or fails
    ssc.awaitTermination()
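windowed_average_sketch.py
# Added illustrative sketch (filename hypothetical, not part of the original
# dataset): a stateless way to compute the windowed average. The module-level
# sumAccum/countAccum globals above live in the driver process while
# reduceByKeyAndWindow runs on executors, so mutating them is fragile; a
# common alternative keeps (sum, count) pairs inside the stream itself.
# The `pairs` argument is an assumption: a DStream of (key, numeric value)
# tuples, mirroring the original pipeline.

def windowed_average(pairs, window_interval=30, sliding_interval=15):
    """Return a DStream of (key, running average over the window)."""
    with_counts = pairs.mapValues(lambda v: (v, 1))
    summed = with_counts.reduceByKeyAndWindow(
        lambda a, b: (a[0] + b[0], a[1] + b[1]),   # values entering the window
        lambda a, b: (a[0] - b[0], a[1] - b[1]),   # values leaving the window
        window_interval, sliding_interval)
    return summed.mapValues(lambda sc: sc[0] / sc[1])

# usage inside a StreamingContext pipeline:
#     averages = windowed_average(fahrenheitTemp)
#     averages.foreachRDD(addToQueue)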
main.py
from bin.world import search
from colorama import Fore
import multiprocessing as mp
import datetime
import random
import os

THREADS = int(os.environ.get('THREADS', 4))
RADIUS = int(os.environ.get('RADIUS', 2500))
MIN_SIZE = int(os.environ.get('MIN_SIZE', 18))
SPACING = int(os.environ.get('SPACING', 3))


# Search random seeds forever
def search_seeds():
    while True:
        seed = random.randint(-9223372036854775808, 9223372036854775807)
        #start = datetime.datetime.now()
        print(f"{Fore.RESET}Searching: {Fore.YELLOW}{str(seed)}{Fore.RESET}")
        clusters = search(seed=seed, radius=RADIUS*2, min_size=MIN_SIZE, spacing=SPACING)
        #print(datetime.datetime.now() - start)


# The guard is required so that worker processes do not re-execute the
# startup code when multiprocessing uses the spawn start method
# (the default on Windows and macOS).
if __name__ == '__main__':
    # Log starting message
    print(f'{Fore.RESET}Starting slime chunk finder...\n'
          f'Threads: {THREADS}\n'
          f'Radius: {RADIUS}\n'
          f'Minimum chunk size: {MIN_SIZE}\n'
          f'Spacing optimization: {SPACING}\n')

    # Start worker processes
    for i in range(THREADS):
        mp.Process(target=search_seeds).start()
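seed_worker_queue_example.py
# Added illustrative sketch (filename hypothetical, not part of the original
# dataset): one way the forever-looping workers above could report results
# back to the parent process instead of only printing. fake_search is a
# stand-in for bin.world.search, whose return type the original file does
# not show.
import multiprocessing as mp
import random

def fake_search(seed):
    # placeholder: pretend a few seeds contain a qualifying cluster
    return [seed] if seed % 7 == 0 else []

def worker(results, attempts):
    for _ in range(attempts):
        seed = random.randint(-2**63, 2**63 - 1)
        for cluster in fake_search(seed):
            results.put(cluster)
    results.put(None)  # sentinel: this worker is done

if __name__ == '__main__':
    results = mp.Queue()
    procs = [mp.Process(target=worker, args=(results, 1000)) for _ in range(4)]
    for p in procs:
        p.start()
    done = 0
    while done < len(procs):
        item = results.get()
        if item is None:
            done += 1          # one more worker finished
        else:
            print('Found:', item)
    for p in procs:
        p.join()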
test_lock.py
""" Copyright (c) 2008-2017, Jesus Cea Avion <jcea@jcea.es> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Jesus Cea Avion nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ """ TestCases for testing the locking sub-system. """ import time import unittest from .test_all import db, test_support, verbose, have_threads, \ get_new_environment_path, get_new_database_path if have_threads : from threading import Thread import sys if sys.version_info[0] < 3 : from threading import currentThread else : from threading import current_thread as currentThread #---------------------------------------------------------------------- class LockingTestCase(unittest.TestCase): def setUp(self): self.homeDir = get_new_environment_path() self.env = db.DBEnv() self.env.open(self.homeDir, db.DB_THREAD | db.DB_INIT_MPOOL | db.DB_INIT_LOCK | db.DB_CREATE) def tearDown(self): self.env.close() test_support.rmtree(self.homeDir) def test01_simple(self): if verbose: print('\n', '-=' * 30) print("Running %s.test01_simple..." % self.__class__.__name__) anID = self.env.lock_id() if verbose: print("locker ID: %s" % anID) lock = self.env.lock_get(anID, "some locked thing", db.DB_LOCK_WRITE) if verbose: print("Aquired lock: %s" % lock) self.env.lock_put(lock) if verbose: print("Released lock: %s" % lock) self.env.lock_id_free(anID) def test02_threaded(self): if verbose: print('\n', '-=' * 30) print("Running %s.test02_threaded..." 
% self.__class__.__name__) threads = [] threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_WRITE,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_READ,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_READ,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_WRITE,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_READ,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_READ,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_WRITE,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_WRITE,))) threads.append(Thread(target = self.theThread, args=(db.DB_LOCK_WRITE,))) for t in threads: import sys if sys.version_info[0] < 3 : t.setDaemon(True) else : t.daemon = True t.start() for t in threads: t.join() def test03_lock_timeout(self): self.env.set_timeout(0, db.DB_SET_LOCK_TIMEOUT) self.assertEqual(self.env.get_timeout(db.DB_SET_LOCK_TIMEOUT), 0) self.env.set_timeout(0, db.DB_SET_TXN_TIMEOUT) self.assertEqual(self.env.get_timeout(db.DB_SET_TXN_TIMEOUT), 0) self.env.set_timeout(123456, db.DB_SET_LOCK_TIMEOUT) self.assertEqual(self.env.get_timeout(db.DB_SET_LOCK_TIMEOUT), 123456) self.env.set_timeout(7890123, db.DB_SET_TXN_TIMEOUT) self.assertEqual(self.env.get_timeout(db.DB_SET_TXN_TIMEOUT), 7890123) def test04_lock_timeout2(self): self.env.set_timeout(0, db.DB_SET_LOCK_TIMEOUT) self.env.set_timeout(0, db.DB_SET_TXN_TIMEOUT) self.env.set_timeout(123456, db.DB_SET_LOCK_TIMEOUT) self.env.set_timeout(7890123, db.DB_SET_TXN_TIMEOUT) def deadlock_detection() : while not deadlock_detection.end : deadlock_detection.count = \ self.env.lock_detect(db.DB_LOCK_EXPIRE) if deadlock_detection.count : while not deadlock_detection.end : pass break time.sleep(0.01) deadlock_detection.end=False deadlock_detection.count=0 t=Thread(target=deadlock_detection) import sys if sys.version_info[0] < 3 : t.setDaemon(True) else : t.daemon = True t.start() self.env.set_timeout(100000, db.DB_SET_LOCK_TIMEOUT) anID = self.env.lock_id() anID2 = self.env.lock_id() self.assertNotEqual(anID, anID2) lock = self.env.lock_get(anID, "shared lock", db.DB_LOCK_WRITE) start_time=time.time() self.assertRaises(db.DBLockNotGrantedError, self.env.lock_get,anID2, "shared lock", db.DB_LOCK_READ) end_time=time.time() deadlock_detection.end=True # Floating point rounding self.assertTrue((end_time-start_time) >= 0.0999) self.env.lock_put(lock) t.join() self.env.lock_id_free(anID) self.env.lock_id_free(anID2) self.assertTrue(deadlock_detection.count>0) def theThread(self, lockType): import sys if sys.version_info[0] < 3 : name = currentThread().getName() else : name = currentThread().name if lockType == db.DB_LOCK_WRITE: lt = "write" else: lt = "read" anID = self.env.lock_id() if verbose: print("%s: locker ID: %s" % (name, anID)) for i in range(1000) : lock = self.env.lock_get(anID, "some locked thing", lockType) if verbose: print("%s: Aquired %s lock: %s" % (name, lt, lock)) self.env.lock_put(lock) if verbose: print("%s: Released %s lock: %s" % (name, lt, lock)) self.env.lock_id_free(anID) #---------------------------------------------------------------------- def test_suite(): suite = unittest.TestSuite() if have_threads: suite.addTest(unittest.makeSuite(LockingTestCase)) else: suite.addTest(unittest.makeSuite(LockingTestCase, 'test01')) return suite if __name__ == '__main__': unittest.main(defaultTest='test_suite')
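lock_usage_sketch.py
# Added illustrative sketch (filename hypothetical, not part of the original
# dataset): the minimal acquire/release cycle exercised by test01_simple
# above, outside of unittest. It uses only bsddb3 calls that appear in the
# original test; the temporary environment directory is an assumption.
import tempfile
from bsddb3 import db

home = tempfile.mkdtemp()
env = db.DBEnv()
env.open(home, db.DB_THREAD | db.DB_INIT_MPOOL | db.DB_INIT_LOCK | db.DB_CREATE)

locker = env.lock_id()                                    # allocate a locker ID
lock = env.lock_get(locker, "some locked thing", db.DB_LOCK_WRITE)
# ... critical section protected by the write lock ...
env.lock_put(lock)                                        # release the lock
env.lock_id_free(locker)                                  # free the locker ID
env.close()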
run_agentFCL.py
from __future__ import print_function
import numpy as np
import cv2
import sys
import os
sys.path.append('./agent')
from agent.doom_simulator import DoomSimulator
import feedforward_closedloop_learning
import threading
from matplotlib import pyplot as plt
from datetime import datetime
from PIL import Image  # used by savePosImage and saveNegImage
import random

width = 160
widthIn = 160
height = 120
heightIn = 120
nOut = 6
neuronsPerLayer = [5, nOut]

outFile = open("FCLOutput.txt", "w")
wtdistFile = open("wtDist.txt", "w")

FCLNet = feedforward_closedloop_learning.FeedforwardClosedloopLearning(
    width * height, neuronsPerLayer)

# init the weights
# FCLNet.getLayer(0).setConvolution(width, height)
FCLNet.initWeights(1., feedforward_closedloop_learning.Neuron.MAX_OUTPUT_RANDOM)
print("Initialised weights")

for i in range(len(neuronsPerLayer)):
    print("hidden ", i, ": ", neuronsPerLayer[i], file=outFile)
#print("learning rate: ", learningRate, file=outFile)

FCLNet.setBias(1)
FCLNet.setMomentum(0.5)
random.seed(datetime.now())
FCLNet.seedRandom(np.random.randint(low=0, high=999999))

preprocess_input_images = lambda x: x / 255. - 0.5

sharpen = np.array((
    [0, 1, 0],
    [1, 4, 1],
    [0, 1, 0]), dtype="int")

edge = np.array((
    [0, 1, 0],
    [1, -4, 1],
    [0, 1, 0]), dtype="int")

plt.ion()
plt.show()
ln1 = False
ln2 = [False, False, False, False]


def getWeights2D(neuron):
    n_neurons = FCLNet.getLayer(0).getNneurons()
    n_inputs = FCLNet.getLayer(0).getNeuron(neuron).getNinputs()
    weights = np.zeros(n_inputs)
    for i in range(n_inputs):
        if FCLNet.getLayer(0).getNeuron(neuron).getMask(i):
            weights[i] = FCLNet.getLayer(0).getNeuron(neuron).getAvgWeight(i)
        else:
            weights[i] = np.nan
    return weights.reshape(heightIn, widthIn)


def getWeights1D(layer, neuron):
    n_neurons = FCLNet.getLayer(layer).getNneurons()
    n_inputs = FCLNet.getLayer(layer).getNeuron(neuron).getNinputs()
    weights = np.zeros(n_inputs)
    for i in range(n_inputs):
        weights[i] = FCLNet.getLayer(layer).getNeuron(neuron).getAvgWeight(i)
    return weights


def plotWeights():
    global ln1
    global ln2
    print("** Update plot")
    while True:
        if ln1:
            ln1.remove()
        plt.figure(1)
        w1 = getWeights2D(0)
        for i in range(1, FCLNet.getLayer(0).getNneurons()):
            w2 = getWeights2D(i)
            w1 = np.where(np.isnan(w2), w1, w2)
        ln1 = plt.imshow(w1, cmap='gray')
        plt.draw()
        plt.pause(0.1)

        for j in range(1, FCLNet.getNumHidLayers() + 1):
            if ln2[j]:
                ln2[j].remove()
            plt.figure(j + 1)
            w1 = np.zeros((FCLNet.getLayer(j).getNneurons(),
                           FCLNet.getLayer(j).getNeuron(0).getNinputs()))
            for i in range(FCLNet.getLayer(j).getNneurons()):
                w1[i, :] = getWeights1D(j, i)
            ln2[j] = plt.imshow(w1, cmap='gray')
            plt.draw()
            plt.pause(5.0)

# uncomment if you want to plot the weights as they evolve
#t1 = threading.Thread(target=plotWeights)
#t1.start()


def getColourImbalance(img, colour):
    if (img.shape[0]) != 3:
        print("Error in getColourImbalance: wrong number of image channels: ", img.shape)
        return 0.

    width = int(img.shape[2] / 2)
    height = int(img.shape[1] / 2)
    print("width: ", width, "height", height)
    avgLeft = np.average(img[:, :, :width], axis=1)
    avgLeft = np.average(avgLeft, axis=1)
    # avgLeft = np.dot(avgLeft, colour)
    avgRight = np.average(img[:, :, width:], axis=1)
    avgRight = np.average(avgRight, axis=1)
    # avgRight = np.dot(avgRight, colour)
    avgTop = np.average(img[:, :height, :], axis=1)
    avgTop = np.average(avgTop, axis=1)
    # avgTop = np.dot(avgTop, colour)
    avgBottom = np.average(img[:, height:, :], axis=1)
    avgBottom = np.average(avgBottom, axis=1)
    # avgBottom = np.dot(avgBottom, colour)
    print("avgLeft: ", avgLeft, " avgRight: ", avgRight,
          "avgTop", avgTop, "avgBottom", avgBottom)

    return 1.


def getMaxColourPos(img, colour, step):
    cv2.imwrite("/tmp/col-" + str(step) + ".jpg", img)
    img = np.array(img, dtype='float64')
    width = int(img.shape[1])
    height = int(img.shape[0])
    # img[:,10,10] = [0,0,255]
    diff = np.ones(img.shape)
    diff[:, :, 0] = colour[0]
    diff[:, :, 1] = colour[1]
    diff[:, :, 2] = colour[2]
    diff = np.absolute(np.add(diff, (-1 * img)))
    cv2.imwrite("/tmp/diff-" + ".jpg", diff)
    diff = np.sum(diff, axis=2)
    indx = np.argmin(diff)
    indx0 = int(indx / width)
    indx1 = indx % width
    pts = np.asarray(np.where((np.mean(diff) - diff) > 150))
    if (pts.shape[1] > 0):
        bottomLeft = np.array([np.amin(pts[1]), np.amin(pts[0])])
        topRight = np.array([np.amax(pts[1]), np.amax(pts[0])])
    else:
        bottomLeft = []
        topRight = []

    return np.array([indx1, indx0]), bottomLeft, topRight, np.mean(diff) - diff[indx0, indx1]


def savePosImage(curr_step, centre, x1, y1, x2, y2, _img, myFile, width, height):
    myFile.write("/tmp/" + str(curr_step) + ".jpg" + " 1" + " " + str(x1) + " " + str(y1)
                 + " " + str(x2) + " " + str(y2) + "\n")
    img = np.zeros(_img.shape, dtype=np.uint8)
    outImage = Image.fromarray(img)
    outImage.save("/tmp/" + str(curr_step) + ".jpg")


def saveImage(curr_step, _img):
    cv2.imwrite("/tmp/FCL-" + str(curr_step) + ".jpg", _img)


def saveNegImage(curr_step, img2, myFile, width, height):
    myFile.write("/tmp/" + str(curr_step) + ".jpg\n")
    img = Image.fromarray(img2)
    img.save("/tmp/" + str(curr_step) + ".jpg")


def main(learning_rate_):
    learningRate = float(learning_rate_)
    FCLNet.setLearningRate(learningRate)
    print("learning rate ", learningRate, file=outFile)

    ## Simulator
    simulator_args = {}
    simulator_args['config'] = 'config/config.cfg'
    simulator_args['resolution'] = (widthIn, heightIn)
    simulator_args['frame_skip'] = 1
    simulator_args['color_mode'] = 'RGB24'
    simulator_args['game_args'] = "+name FCL +colorset 7"

    historyLen = 3
    print("HistoryLen: ", historyLen)
    print('starting simulator')
    simulator = DoomSimulator(simulator_args)
    num_channels = simulator.num_channels
    print('started simulator')

    modelDir = os.path.join(os.path.expanduser("~"),
                            "Dev/GameAI/vizdoom_cig2017/icodoom/ICO1/Models")

    img_buffer = np.zeros((historyLen, simulator.resolution[1],
                           simulator.resolution[0], num_channels), dtype='uint8')
    meas_buffer = np.zeros((historyLen, simulator.num_meas))
    act_buffer = np.zeros((historyLen, 7))
    curr_step = 0
    term = False

    diff_z = 0
    iter = 1
    epoch = 200
    radialFlowLeft = 30.
    radialFlowRight = 30.
    radialFlowInertia = 0.4
    radialGain = 4.
    rotationGain = 50.
    errorThresh = 10.
    updatePtsFreq = 50
    reflexGain = 1E-3
    flowGain = 0.
    netGain = 40.
    reflexReduceGain = -0.05

    # create masks for left and right visual fields - note that these only
    # cover the upper half of the image; this is to help prevent the tracking
    # getting confused by the floor pattern
    half_height = round(height / 2)
    half_width = round(width / 2)
    maskLeft = np.zeros([height, width], np.uint8)
    maskLeft[half_height:, :half_width] = 1.
    maskRight = np.zeros([height, width], np.uint8)
    maskRight[half_height:, half_width:] = 1.

    lk_params = dict(winSize=(15, 15), maxLevel=2,
                     criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))
    feature_params = dict(maxCorners=500, qualityLevel=0.03, minDistance=7, blockSize=7)

    imgCentre = np.array([int(simulator_args['resolution'][0] / 2),
                          int(simulator_args['resolution'][1] / 2)])
    print("Image centre: ", imgCentre)
    rawInputs = np.zeros((height, width))
    cheatInputs = np.zeros((width, height))
    input_buff = np.zeros((width * height))
    target_buff = np.zeros((1, 1))
    meas_buff = np.zeros((1, simulator.num_meas))
    netOut = 0.
    netErr = np.zeros(neuronsPerLayer[0])
    delta = 0.
    shoot = 0
    wtDist = np.zeros(FCLNet.getNumLayers())
    reflexOn = False
    iter = 0
    killed = False
    # FCLNet.saveModel("Models/hack.txt")

    while not term:
        if curr_step < historyLen:
            curr_act = np.zeros(7).tolist()
            img, meas, rwrd, term = simulator.step(curr_act)
            print("Image: ", img.shape, " max: ", np.amax(img), " min: ", np.amin(img))
            if curr_step == 0:
                p0Left = cv2.goodFeaturesToTrack(img[:, :, 0], mask=maskLeft, **feature_params)
                p0Right = cv2.goodFeaturesToTrack(img[:, :, 0], mask=maskRight, **feature_params)

            img_buffer[curr_step % historyLen] = img
            meas_buffer[curr_step % historyLen] = meas
            act_buffer[curr_step % historyLen] = curr_act[:7]
        else:
            img1 = img_buffer[(curr_step - 2) % historyLen, :, :, :]
            img2 = img_buffer[(curr_step - 1) % historyLen, :, :, :]
            state = simulator._game.get_state()
            stateImg = state.screen_buffer
            icoSteer = 0.

            if curr_step > 100:
                health = meas[1]
                if (health < 0.1):
                    reflexOn = False
                    iter = 0

                if (simulator._game.is_player_dead()) and killed == False:
                    g = open("KD.txt", "a")
                    s = "0 " + str(curr_step) + " " + str(datetime.now().timestamp()) + "\n"
                    g.write(s)
                    g.close()
                    killed = True
                    print("KILLED")
                if (not (simulator._game.is_player_dead())):
                    killed = False

                # Don't run any networks when the player is dead!
                if (health < 101. and health > 0.):
                    icoInSteer = 0.
                    saveImage(curr_step, stateImg)
                    centre, bottomLeft, topRight, colourStrength = \
                        getMaxColourPos(stateImg, [0, 0, 255], curr_step)
                    colourSteer = imgCentre[0]
                    if (len(bottomLeft) > 0 and len(topRight) > 0
                            and ((topRight[0] - bottomLeft[0]) < width / 3)
                            and ((topRight[1] - bottomLeft[1]) < height / 2)):
                        colourSteer = bottomLeft[0] + int(0.5 * (topRight[0] - bottomLeft[0]))
                        shoot = 1

                    rawInputs = np.array(np.sum(stateImg, axis=2) / 3)
                    input_buff[:] = np.ndarray.flatten(rawInputs)
                    input_buff = input_buff - np.mean(input_buff)
                    input_buff = input_buff / np.sqrt(np.var(input_buff))

                    # we want the reflex to be delayed wrt the image input, so
                    # that the image arrives first. Otherwise the learning can
                    # never reduce the error to zero, no matter how good the
                    # controller.
                    oldDelta = delta
                    if (iter > 2):
                        delta = (float(colourSteer) - float(imgCentre[0])) / float(width)
                    else:
                        delta = 0
                    deltaDiff = delta - oldDelta

                    if (iter > 2):
                        if (np.abs(delta) > 0.01):
                            shoot = 0
                        netErr[:] = delta
                        target_buff[...] = delta + netOut
                        meas_buff[0, :] = meas
                        FCLNet.setLearningRate(0.)
                        FCLNet.doStep(input_buff, netErr)
                        netOut = FCLNet.getOutput(0) + 0.3 * FCLNet.getOutput(1) + 0.1 * FCLNet.getOutput(2)
                        netOut1 = FCLNet.getOutput(3) + 0.3 * FCLNet.getOutput(4) + 0.1 * FCLNet.getOutput(5)
                        netErr += reflexReduceGain * netGain * (netOut - netOut1)
                        FCLNet.setLearningRate(learningRate)
                        FCLNet.doStep(input_buff, netErr)
                        netOut = FCLNet.getOutput(0) + 0.3 * FCLNet.getOutput(1) + 0.1 * FCLNet.getOutput(2)
                        netOut1 = FCLNet.getOutput(3) + 0.3 * FCLNet.getOutput(4) + 0.1 * FCLNet.getOutput(5)
                        # print("%s" % (" SHOOT " if shoot == 1 else " "), deltaDiff, delta, netOut)

                        for i in range(FCLNet.getNumLayers()):
                            wtDist[i] = FCLNet.getLayer(i).getWeightDistanceFromInitialWeights()

                        print(curr_step, delta, netErr[0], netOut - netOut1, health, file=outFile)
                        print(' '.join(map(str, wtDist)), file=wtdistFile)

                    diff_theta = 0.6 * max(min((icoInSteer), 5.), -5.)
                    netErr[:] = 0.
                    diff_theta = diff_theta + reflexGain * colourStrength * delta
                    curr_act = np.zeros(7).tolist()
                    curr_act[0] = 0
                    curr_act[1] = 0
                    curr_act[2] = shoot
                    curr_act[3] = curr_act[3] + diff_z
                    curr_act[4] = 0
                    curr_act[5] = 0.
                    curr_act[6] = diff_theta + netGain * (netOut - netOut1)
                    iter += 1

            if (curr_step % epoch == 0):
                # uncomment to write models to file
                """
                if not os.path.exists("Models"):
                    os.makedirs("Models")
                FCLNet.saveModel("Models/BP-" + str(curr_step) + ".txt")
                file = open("Models/checkpoint", 'w')
                file.write("Models/BP-" + str(curr_step) + ".txt")
                file.close()
                """

            img, meas, rwrd, term = simulator.step(curr_act)
            if (not (meas is None)) and meas[0] > 30.:
                meas[0] = 30.
            if not term:
                img_buffer[curr_step % historyLen] = img
                meas_buffer[curr_step % historyLen] = meas
                act_buffer[curr_step % historyLen] = curr_act[:7]

        curr_step += 1

    simulator.close_game()
    outFile.close()
    wtdistFile.close()


if __name__ == '__main__':
    if (len(sys.argv) == 2):
        print("learning rate: ", str(sys.argv[1]))
        main(sys.argv[1])
    else:
        print("usage: run_agentFCL learning_rate")
race.py
import threading
from time import sleep
from random import random

counter = 0

randsleep = lambda: sleep(0.1 * random())

def incr(n):
    global counter
    for count in range(n):
        current = counter
        randsleep()
        counter = current + 1
        randsleep()

n = 5

t1 = threading.Thread(target=incr, args=(n,))
t2 = threading.Thread(target=incr, args=(n,))

t1.start()
t2.start()
t1.join()
t2.join()

print(f'Counter: {counter}')

"""
$ python race.py
Counter: 6  # varies between different runs
"""
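race_fixed.py
# Added illustrative sketch (filename hypothetical, not part of the original
# dataset): the same counter as race.py above, but with the read-modify-write
# made atomic by a threading.Lock, so the final count is always 2 * n.
import threading
from time import sleep
from random import random

counter = 0
counter_lock = threading.Lock()

randsleep = lambda: sleep(0.1 * random())

def incr(n):
    global counter
    for count in range(n):
        with counter_lock:        # only one thread may update at a time
            current = counter
            randsleep()
            counter = current + 1
        randsleep()

n = 5
t1 = threading.Thread(target=incr, args=(n,))
t2 = threading.Thread(target=incr, args=(n,))
t1.start()
t2.start()
t1.join()
t2.join()
print(f'Counter: {counter}')      # always 10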
sensor_daemon.py
'''
Created on 2013.09.07.
'''
from threading import Thread
import time
import logging
import os

from node.sensor.sensor_type import SensorType
from node.sensor.sensor_collector import TempHumCollector
from node.sensor.sensor_collector import LuxCollector
from node.sensor.sensor_collector import DistanceCollector
from node.sensor.sensor_collector import MotionCollector
from node.sensor.sensor_collector import PowerCollector
from node.sensor.device_controller import RelayController
from node.sensor.device_controller import MotorController
from node.communication.log_sender import LogSender
from node.communication.log_message import LogMessage
from config.config import SensorConfig
from config.config import MqttConfig
from config.config import Config


class SensorDaemon(object):

    tempThread = None
    luxThread = None
    distanceThread = None
    motionThread = None
    powerThread = None
    motorThread = None

    runTemp = False
    runLux = False
    runDistance = False
    runMotion = False
    runPower = False
    runMotor = False

    def __init__(self):
        '''
        Constructor
        '''

    def startTempHumDaemon(self):
        if self.tempThread is None:
            self.tempThread = Thread(target=self.logTempHum,
                                     args=(SensorType.TEMP_HUM,))
            self.runTemp = True
            self.tempThread.start()

    def stopTempHumDaemon(self):
        self.runTemp = False
        self.tempThread = None

    def startLuxDaemon(self):
        if self.luxThread is None:
            self.luxThread = Thread(target=self.logLux)
            self.runLux = True
            self.luxThread.start()

    def stopLuxDaemon(self):
        self.runLux = False
        self.luxThread = None

    def startDistanceDaemon(self):
        if self.distanceThread is None:
            self.distanceThread = Thread(target=self.logDistance)
            self.runDistance = True
            self.distanceThread.start()

    def stopDistanceDaemon(self):
        self.runDistance = False
        self.distanceThread = None

    def startMotionDaemon(self):
        if self.motionThread is None:
            self.motionThread = Thread(target=self.logMotion)
            self.runMotion = True
            self.motionThread.start()

    def stopMotionDaemon(self):
        self.runMotion = False
        self.motionThread = None

    def startPowerDaemon(self):
        if self.powerThread is None:
            self.powerThread = Thread(target=self.logPower)
            self.runPower = True
            self.powerThread.start()

    def stopPowerDaemon(self):
        self.runPower = False
        self.powerThread = None

    def startMotorDaemon(self):
        if self.motorThread is None:
            self.motorThread = Thread(target=self.useMotor)
            self.runMotor = True
            self.motorThread.start()

    def stopMotorDaemon(self):
        self.runMotor = False
        self.motorThread = None

    def logTempHum(self, sensorType):
        dataCollector = TempHumCollector()
        logSender = LogSender()
        #logSender.connect()
        while self.runTemp:
            if sensorType == SensorType.TEMP_HUM:
                dataCollector.readTempHum()
                #sensorMessage = LogMessage(1, int(round(time.time() * 1000)))
                #sensorMessage.data.append(dataCollector.tempData)
                #sensorMessage.data.append(dataCollector.humData)
            else:
                sensorMessage = 0
            #logSender.sendLog(MqttConfig.TOPIC_LOG_TEMP_HUM, sensorMessage)
            #if Config.DEVELOPER_MODE_ON:
                #logging.log(logging.DEBUG, sensorMessage)
                #logging.info(sensorMessage)
            time.sleep(SensorConfig.TIME_BETWEEN_TWO_MEASUREMENT_IN_SEC)

    def logLux(self):
        dataCollector = LuxCollector()
        logSender = LogSender()
        #logSender.connect()
        while self.runLux:
            dataCollector.readLux()
            sensorMessage = LogMessage(1, int(round(time.time() * 1000)))
            sensorMessage.data.append(dataCollector.luxData)
            #logSender.sendLog(MqttConfig.TOPIC_LOG_LUX, sensorMessage)
            if Config.DEVELOPER_MODE_ON:
                logging.log(logging.DEBUG, sensorMessage)
                logging.info(sensorMessage)
            time.sleep(SensorConfig.TIME_BETWEEN_TWO_MEASUREMENT_IN_SEC)

    def logDistance(self):
        dataCollector = DistanceCollector()
        relayController = RelayController(SensorConfig.GPIO_RELAY_IN_1)
        logSender = LogSender()
        #logSender.connect()
        isTriggered = False
        sensorMessage = None
        triggerTime = 0
        while self.runDistance:
            dataCollector.readDistance()
            if dataCollector.distanceData is not None:
                if SensorConfig.DISTANCE_TRIGGER_STANDBY_START <= \
                        dataCollector.distanceData.value <= \
                        SensorConfig.DISTANCE_TRIGGER_STANDBY_END:
                    isTriggered = True
                    triggerTime = int(round(time.time() * 1000))
                if isTriggered:
                    if SensorConfig.DISTANCE_TRIGGER_ACTION_START <= \
                            dataCollector.distanceData.value <= \
                            SensorConfig.DISTANCE_TRIGGER_ACTION_END:
                        actionTime = int(round(time.time() * 1000)) - triggerTime
                        if actionTime <= 3000:
                            sensorMessage = LogMessage(1, int(round(time.time() * 1000)))
                            sensorMessage.data.append(dataCollector.distanceData)
                            #logSender.sendLog(MqttConfig.TOPIC_LOG_DISTANCE, sensorMessage)
                            relayController.switch()
                            print("The distance sensor switched the relay!")
                            isTriggered = False
                        else:
                            isTriggered = False
            if Config.DEVELOPER_MODE_ON:
                logging.log(logging.DEBUG, sensorMessage)
                logging.info(sensorMessage)
            time.sleep(SensorConfig.TIME_FOR_ACTION_CHECK)

    def logMotion(self):
        dataCollector = MotionCollector()
        relayController = RelayController(SensorConfig.GPIO_RELAY_IN_4)
        logSender = LogSender()
        #logSender.connect()
        startTime = 0
        sensorMessage = None
        while self.runMotion:
            dataCollector.readMotion()
            if dataCollector.motionData is not None:
                sensorMessage = LogMessage(1, int(round(time.time() * 1000)))
                sensorMessage.data.append(dataCollector.motionData)
                #logSender.sendLog(MqttConfig.TOPIC_LOG_MOTION, sensorMessage)
                if dataCollector.motionData.value == 1:
                    startTime = int(round(time.time() * 1000))
                    relayController.setOn()
                timeGap = int(round(time.time() * 1000)) - startTime
                if timeGap >= 8000:
                    relayController.setOff()
            if Config.DEVELOPER_MODE_ON:
                logging.log(logging.DEBUG, sensorMessage)
                logging.info(sensorMessage)
            time.sleep(SensorConfig.TIME_FOR_ACTION_CHECK)

    def logPower(self):
        dataCollector = PowerCollector()
        logSender = LogSender()
        #logSender.connect()
        while self.runPower:
            dataCollector.readPowerInWatt()
            if dataCollector.powerData is not None:
                sensorMessage = LogMessage(1, int(round(time.time() * 1000)))
                sensorMessage.data.append(dataCollector.powerData)
                #logSender.sendLog(MqttConfig.TOPIC_LOG_POWER, sensorMessage)
                self.stopTempHumDaemon()
                time.sleep(1)
                self.startTempHumDaemon()
            if Config.DEVELOPER_MODE_ON:
                logging.log(logging.DEBUG, sensorMessage)
                logging.info(sensorMessage)
            time.sleep(SensorConfig.TIME_BETWEEN_TWO_MEASUREMENT_IN_SEC - 1)
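event_stop_sketch.py
# Added illustrative sketch (filename hypothetical, not part of the original
# dataset): the daemon above stops workers by flipping booleans such as
# runTemp, which a worker only notices on its next loop pass and which makes
# restarts racy (the old thread may still be running when a new one starts).
# threading.Event is a common alternative; the sensor reading is faked here.
import threading
import time

class Worker:
    def __init__(self, interval=1.0):
        self.interval = interval
        self.stop_event = threading.Event()
        self.thread = None

    def start(self):
        self.stop_event.clear()
        self.thread = threading.Thread(target=self._run)
        self.thread.start()

    def stop(self):
        self.stop_event.set()
        self.thread.join()        # wait until the loop has really exited
        self.thread = None

    def _run(self):
        # wait() sleeps like time.sleep(), but wakes up early on stop()
        while not self.stop_event.wait(self.interval):
            print('reading sensor...')   # placeholder for a real measurement

if __name__ == '__main__':
    w = Worker(0.2)
    w.start()
    time.sleep(1)
    w.stop()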
test_message_dict.py
""" Tests for data class message dict. """ import unittest import threading import time from synchronized_set import SynchronizedSet from src.beans import NodeInformation, NetAddress from src.message_dict import MessageDict, DEFAULT_MESSAGE, DISPATCH_MESSAGE, MESSAGE_SEPARATOR, JSON_SEPARATOR alice_information = NodeInformation(NetAddress(port=4040), name='alice') bob_information = NodeInformation(NetAddress(port=5050), name='bob') peter_information = NodeInformation(NetAddress(port=6060), name='peter') bob_information.wish_master = peter_information class MessageDictTestCase(unittest.TestCase): """ Tests for data class message dict. """ def test_adding(self): """ Adds two text messages to instance of message dict and checks if get_next_message retrieves correctly contacted string with message separator. :return: """ message_dict = MessageDict(bob_information) message_dict.add_message_for_node("test", alice_information) message_dict.add_message_for_node("test2", alice_information) nextMsg = message_dict.get_next_message(alice_information) self.assertEqual('test--__--test2', nextMsg) def test_add_multiple_nodes(self): """ First add three nodes to dict, then add two text messages for all added nodes and checks for each if if get_next_message retrieves correctly contacted string with message separator. :return: None """ message_dict = MessageDict(bob_information) message_dict.add_node(alice_information) message_dict.add_node(bob_information) message_dict.add_node(peter_information) message_dict.add_message_for_all_nodes("test") message_dict.add_message_for_all_nodes("test2") self.assertEqual('test--__--test2', message_dict.get_next_message(alice_information)) self.assertEqual('test--__--test2', message_dict.get_next_message(bob_information)) self.assertEqual('test--__--test2', message_dict.get_next_message(peter_information)) def test_clear(self): """ Test if clear methods remove all queues form message dict :return: None """ message_dict = MessageDict(bob_information) message_dict.add_message_for_node("test", alice_information) message_dict.clear() self.assertEqual(len(message_dict.dict), 0) def test_default_message(self): """ Test if in case of a empty message queue correct default message is returned. :return: None """ message_dict = MessageDict(bob_information) message_dict.add_message_for_node("test", alice_information) nextMsg = message_dict.get_next_message(alice_information) self.assertEqual('test', nextMsg) self.assertEqual(DEFAULT_MESSAGE + JSON_SEPARATOR + bob_information.to_json(), message_dict.get_next_message(alice_information)) def test_wait_unit_all_received(self): """ Checks if wait_until_everybody_received to not stuck in an endless loop after all messages has been taken. :return: None """ message_dict = MessageDict(bob_information) message_dict.add_node(bob_information) message_dict.add_node(peter_information) message_dict.add_dispatch_message(alice_information, SynchronizedSet(set())) t = threading.Thread(target=self.take_message, args=(message_dict,)) t.start() message_dict.wait_until_everybody_received(DISPATCH_MESSAGE + MESSAGE_SEPARATOR + alice_information.to_json()) def take_message(self, message_dict): """" Help method that take the messages to simulate PingMan who send messages. """ time.sleep(1) message_dict.get_next_message(bob_information) message_dict.get_next_message(peter_information) if __name__ == '__main__': unittest.main()
server.py
import cherrypy from cherrypy.lib.static import serve_file import os import sys import webbrowser import threading import time import socket import json from client import addClient, loadClients, clearClients, clientsList, removeClient from transmitJSON import sendJSON, recvJSON def getRootDir(): return os.path.dirname(os.path.abspath(sys.argv[0])) def manageClient(client, addr): data = recvJSON(client) data['ip']=addr[0] try: addClient(data) sendJSON(client, {'ok': True}) except ValueError as e: sendJSON(client, {'ok': False, 'error': str(e)}) finally: client.close() def subscribeHandler(): s = socket.socket() s.settimeout(0.5) s.bind(('0.0.0.0', 3001)) s.listen() print('listen for subscription on port', 3001) while running: try: client, addr = s.accept() manageClient(client, addr) except socket.timeout: pass print('no more listen for subscription') def startBrowser(): time.sleep(2) webbrowser.open('http://localhost:3000') print('browser started !') class Server: @cherrypy.expose(['game.js']) def game(self): cherrypy.response.headers['Content-Type'] = 'application/javascript' return gameJS @cherrypy.expose @cherrypy.tools.json_out() def clients(self): return clientsList() @cherrypy.expose @cherrypy.tools.json_out() @cherrypy.tools.json_in() def remove(self): data = cherrypy.request.json removeClient(data['name']) if __name__ == '__main__': if len(sys.argv) > 1: game = sys.argv[1] else: game = 'Avalam' with open(os.path.join(getRootDir(),f'public/games/{game}.js')) as file: gameJS = file.read().encode('utf8') running = True threading.Thread(target=startBrowser).start() thread = threading.Thread(target=subscribeHandler) thread.start() def stop(): print('STOPPING subscription handler...') global running running = False thread.join() print('subscription handler stopped') loadClients() cherrypy.engine.subscribe('stop', stop) cherrypy.quickstart(Server(), '', 'server.conf')
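For reference, a sketch of the matching subscription client, reusing the sendJSON/recvJSON helpers the server itself imports from transmitJSON. Only the 'name' field is visibly required (removeClient uses it); any other payload fields here are assumptions, since addClient's validation is not shown:

# Hypothetical subscriber for the port-3001 handler above.
import socket
from transmitJSON import sendJSON, recvJSON

s = socket.socket()
s.connect(('localhost', 3001))
sendJSON(s, {'name': 'player1', 'port': 5000})   # 'port' is an assumed extra field
answer = recvJSON(s)   # {'ok': True} or {'ok': False, 'error': '...'}
if not answer['ok']:
    print('subscription refused:', answer['error'])
s.close()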
regressions.py
#!/usr/bin/env python3 from argparse import ArgumentParser import sys import os import subprocess import re import glob import threading import time DESCRIPTION = """Regressor is a tool to run regression tests in a CI env.""" class PrintDotsThread(object): """Prints a dot every "interval" (default is 300) seconds""" def __init__(self, interval=300): self.interval = interval thread = threading.Thread(target=self.run, args=()) thread.daemon = True thread.start() def run(self): """ Runs until the main Python thread exits. """ ## Print a newline at the very beginning. print("") while True: # Print dot print(".") time.sleep(self.interval) class regressor(): _re_sanitizer_log = re.compile(r"""ERROR: (libFuzzer|UndefinedBehaviorSanitizer)""") def __init__(self, description, args): self._description = description self._args = self.parseCmdLine(description, args) self._repo_root = os.path.dirname(sys.path[0]) self._fuzzer_path = os.path.join(self._repo_root, "build/test/tools/ossfuzz") self._logpath = os.path.join(self._repo_root, "test_results") def parseCmdLine(self, description, args): argParser = ArgumentParser(description) argParser.add_argument('-o', '--out-dir', required=True, type=str, help="""Directory where test results will be written""") return argParser.parse_args(args) @staticmethod def run_cmd(command, logfile=None, env=None): """ Args: command (str): command to run logfile (str): log file name env (dict): dictionary holding key-value pairs for bash environment variables Returns: int: The exit status of the command. Exit status codes are: 0 -> Success 1-255 -> Failure """ if not logfile: logfile = os.devnull if not env: env = os.environ.copy() logfh = open(logfile, 'w') proc = subprocess.Popen(command, shell=True, executable='/bin/bash', env=env, stdout=logfh, stderr=subprocess.STDOUT) ret = proc.wait() logfh.close() return ret def process_log(self, logfile): """ Args: logfile (str): log file name Returns: bool: Test status. True -> Success False -> Failure """ ## Log may contain non ASCII characters, so we simply stringify them ## since they don't matter for regular expression matching rawtext = str(open(logfile, 'rb').read()) return not re.search(self._re_sanitizer_log, rawtext) def run(self): """ Returns: bool: Test status. True -> All tests succeeded False -> At least one test failed """ testStatus = [] for fuzzer in glob.iglob("{}/*_ossfuzz".format(self._fuzzer_path)): basename = os.path.basename(fuzzer) logfile = os.path.join(self._logpath, "{}.log".format(basename)) corpus_dir = "/tmp/solidity-fuzzing-corpus/{0}_seed_corpus" \ .format(basename) cmd = "find {0} -type f | xargs -n1 sh -c '{1} $0 || exit 255'".format(corpus_dir, fuzzer) self.run_cmd(cmd, logfile=logfile) ret = self.process_log(logfile) if not ret: print( "\t[-] libFuzzer reported failure for {0}. " "Failure logged to test_results".format( basename)) testStatus.append(False) else: print("\t[+] {0} passed regression tests.".format(basename)) testStatus.append(True) return all(testStatus) if __name__ == '__main__': dotprinter = PrintDotsThread() tool = regressor(DESCRIPTION, sys.argv[1:]) sys.exit(not tool.run())
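To make the pass/fail criterion concrete: process_log declares a run failed as soon as the sanitizer regex matches anywhere in the log, and passing otherwise. A small self-contained check (the log path is made up):

# Demonstration of process_log()'s failure detection.
tool = regressor(DESCRIPTION, ['--out-dir', '/tmp/regressor-demo'])
with open('/tmp/demo.log', 'w') as f:
    f.write('ERROR: libFuzzer: deadly signal\n')
assert tool.process_log('/tmp/demo.log') is False   # sanitizer hit -> failure
with open('/tmp/demo.log', 'w') as f:
    f.write('all corpus files executed cleanly\n')
assert tool.process_log('/tmp/demo.log') is True    # clean log -> success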
main.py
#!/usr/bin/env pybricks-micropython import struct, threading from pybricks import ev3brick as brick from pybricks.ev3devices import (Motor, TouchSensor, ColorSensor, InfraredSensor, UltrasonicSensor, GyroSensor) from pybricks.parameters import (Port, Stop, Direction, Button, Color, SoundFile, ImageFile, Align) from pybricks.tools import print, wait, StopWatch from pybricks.robotics import DriveBase from devices import detectJoystick from joystick import JoyStick, BUTTON_A, BUTTON_X SPEED = 100 STEERING = 90 STATUS_STOPPED = 0 STATUS_FORWARD = 1 STATUS_BACKWARD = 2 STATUS_STEERING = 3 COLORS = (None, Color.GREEN, Color.RED, Color.YELLOW) class Driver(): def __init__(self, leftMotor, rightMotor, diameter, axle): self.driver = DriveBase(leftMotor, rightMotor, diameter, axle) self.x = 0 self.y = 0 self.speed = 0 self.steering = 0 def drive(self, speed, steering): self.speed = speed self.steering = steering if self.speed == 0: self.driver.stop() else: self.driver.drive(self.speed, self.steering) class Robot(): def __init__(self, leftMotor, rightMotor, topMotor, diameter, axle, maxSpeed=300, maxSteering=180, port=Port.S4): self.driver = Driver(leftMotor, rightMotor, diameter, axle) self.cannon = topMotor self.ultrasonic = UltrasonicSensor(port) self.speedStep = 32767 // maxSpeed self.steeringStep = 32767 // maxSteering self.active = True def drive(self, x, y): # map y (-32768 ~ +32767) to speed (+maxSpeed ~ -maxSpeed): speed = -y // self.speedStep # map x (-32768 ~ +32767) to steering (-maxSteering ~ +maxSteering): steering = x // self.steeringStep self.driver.drive(speed, steering) def target(self, x): self.cannon.run(-x // 327) def fire(self): brick.sound.file('cannon.wav') def inactive(self): self.active = False self.drive(0, 0) brick.sound.beep() def autoStopLoop(robot): while robot.active: if robot.ultrasonic.distance() < 200: robot.drive(0, 0) wait(100) def main(): brick.sound.beep() joystickEvent = detectJoystick(['Controller']) if joystickEvent: robot = Robot(Motor(Port.D), Motor(Port.A), Motor(Port.B), 55, 200) t = threading.Thread(target=autoStopLoop, args=(robot,)) t.start() def onButtonPressed(code): if code == BUTTON_X: robot.inactive() return False if code == BUTTON_A: robot.fire() return True def onLeftJoyChanged(x, y): robot.drive(x, y) def onRightJoyChanged(x, y): robot.target(x) joystick = JoyStick(joystickEvent) joystick.setButtonHandler(onButtonPressed) joystick.setJoyLeftHandler(onLeftJoyChanged) joystick.setJoyRightHandler(onRightJoyChanged) joystick.startLoop() else: brick.sound.beep() main()
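The joystick-to-motor mapping in Robot.drive is plain integer division; the same arithmetic as a standalone sketch, no EV3 hardware needed:

# Standalone sketch of Robot.drive()'s integer mapping.
MAX_SPEED = 300        # mm/s, as passed to Robot(...) above
MAX_STEERING = 180     # deg/s, the Robot default

speed_step = 32767 // MAX_SPEED        # 109
steering_step = 32767 // MAX_STEERING  # 182

def map_stick(x, y):
    # y is inverted: pushing the stick forward reports a negative raw value
    return -y // speed_step, x // steering_step

print(map_stick(0, -32768))   # (300, 0)  full speed ahead, no steering
print(map_stick(32767, 0))    # (0, 180)  turn in place to the right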
ir_engine.py
# # Copyright (c) 2018-2019 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import datetime import json import queue from threading import Thread from openvino.inference_engine import IENetwork, IEPlugin from ie_serving.config import GLOBAL_CONFIG from ie_serving.logger import get_logger from ie_serving.models import InferenceStatus from ie_serving.models.shape_management.batching_info import BatchingInfo from ie_serving.models.shape_management.shape_info import ShapeInfo from ie_serving.models.shape_management.utils import BatchingMode, ShapeMode logger = get_logger(__name__) def inference_callback(status, py_data): ir_engine = py_data['ir_engine'] request = py_data['request'] ireq_index = py_data['ireq_index'] start_time = py_data['start_time'] duration = (datetime.datetime.now() - start_time).total_seconds() * 1000 if status == InferenceStatus.OK: request.set_result(ireq_index=ireq_index, result=ir_engine.exec_net.requests[ireq_index]. outputs) else: request.set_result(ireq_index=ireq_index, result="Error occurred during inference execution") logger.debug("[Inference callback] --- Inference completed in {} ms". format(duration)) class IrEngine(): def __init__(self, model_name, model_version, net, plugin, mapping_config, exec_net, batching_info, shape_info, free_ireq_index_queue, num_ireq, requests_queue, target_device, plugin_config): self.model_name = model_name self.model_version = model_version self.exec_net = exec_net self.net = net self.batching_info = batching_info self.shape_info = shape_info self.plugin = plugin self.input_tensor_names = list(net.inputs.keys()) self.output_tensor_names = list(net.outputs.keys()) self.model_keys = self.set_keys(mapping_config) self.input_key_names = list(self.model_keys['inputs'].keys()) self.free_ireq_index_queue = free_ireq_index_queue self.num_ireq = num_ireq self.requests_queue = requests_queue self.target_device = target_device self.plugin_config = plugin_config logger.info("Matched keys for model: {}".format(self.model_keys)) self.engine_active = True self.inference_thread = Thread(target=self.start_inference_thread) self.inference_thread.daemon = True self.inference_thread.start() @classmethod def build(cls, model_name, model_version, model_xml, model_bin, mapping_config, batch_size_param, shape_param, num_ireq, target_device, plugin_config): plugin = IEPlugin(device=target_device, plugin_dirs=GLOBAL_CONFIG['plugin_dir']) if GLOBAL_CONFIG['cpu_extension'] is not None \ and 'CPU' in target_device: plugin.add_cpu_extension(GLOBAL_CONFIG['cpu_extension']) net = IENetwork(model=model_xml, weights=model_bin) batching_info = BatchingInfo(batch_size_param) shape_info = ShapeInfo(shape_param, net.inputs) if batching_info.mode == BatchingMode.FIXED: net.batch_size = batching_info.batch_size else: batching_info.batch_size = net.batch_size effective_batch_size = batching_info.get_effective_batch_size() logger.debug("[Model: {}, version: {}] --- effective batch size - {}" .format(model_name, model_version, effective_batch_size)) 
############################### # Initial shape setup if shape_info.mode == ShapeMode.FIXED: logger.debug("[Model: {}, version: {}] --- Setting shape to " "fixed value: {}".format(model_name, model_version, shape_info.shape)) net.reshape(shape_info.shape) elif shape_info.mode == ShapeMode.AUTO: logger.debug("[Model: {}, version: {}] --- Setting shape to " "automatic".format(model_name, model_version)) net.reshape({}) elif shape_info.mode == ShapeMode.DEFAULT: logger.debug("[Model: {}, version: {}] --- Setting shape to " "default".format(model_name, model_version)) ############################### # Creating free infer requests indexes queue free_ireq_index_queue = queue.Queue(maxsize=num_ireq) for ireq_index in range(num_ireq): free_ireq_index_queue.put(ireq_index) ############################### requests_queue = queue.Queue(maxsize=GLOBAL_CONFIG[ 'engine_requests_queue_size']) exec_net = plugin.load(network=net, num_requests=num_ireq, config=plugin_config) ir_engine = cls(model_name=model_name, model_version=model_version, mapping_config=mapping_config, net=net, plugin=plugin, exec_net=exec_net, batching_info=batching_info, shape_info=shape_info, free_ireq_index_queue=free_ireq_index_queue, num_ireq=num_ireq, requests_queue=requests_queue, target_device=target_device, plugin_config=plugin_config) return ir_engine def _get_mapping_data_if_exists(self, mapping_config): if mapping_config is not None: try: with open(mapping_config, 'r') as f: mapping_data = json.load(f) return mapping_data except Exception as e: message = "Error occurred while reading mapping_config in " \ "path {}. Message error {}".format(mapping_config, e) logger.error("[Model: {}, version: {}] --- {}".format( self.model_name, self.model_version, message)) return None def _return_proper_key_value(self, data: dict, which_way: str, tensors: list): temp_keys = {} for input_tensor in tensors: if which_way in data: if input_tensor in data[which_way]: temp_keys.update({ data[which_way][input_tensor]: input_tensor }) else: temp_keys.update({input_tensor: input_tensor}) else: temp_keys.update({input_tensor: input_tensor}) return temp_keys def _set_tensor_names_as_keys(self): keys_names = {'inputs': {}, 'outputs': {}} for input_tensor in self.input_tensor_names: keys_names['inputs'].update({input_tensor: input_tensor}) for output_tensor in self.output_tensor_names: keys_names['outputs'].update({output_tensor: output_tensor}) return keys_names def _set_names_in_config_as_keys(self, data: dict): keys_names = {'inputs': self._return_proper_key_value( data=data, which_way='inputs', tensors=self.input_tensor_names), 'outputs': self._return_proper_key_value( data=data, which_way='outputs', tensors=self.output_tensor_names)} return keys_names def set_keys(self, mapping_config): mapping_data = self._get_mapping_data_if_exists(mapping_config) if mapping_data is None: return self._set_tensor_names_as_keys() else: return self._set_names_in_config_as_keys(mapping_data) def start_inference_thread(self): logger.debug("Starting inference service for model {} version {}" .format(self.model_name, self.model_version)) while self.engine_active: try: request = self.requests_queue.get(timeout=GLOBAL_CONFIG[ 'engine_requests_queue_timeout']) except queue.Empty: continue error_message = self.adjust_network_inputs_if_needed( request.inference_input) if error_message is not None: request.result = error_message continue ireq_index = self.free_ireq_index_queue.get() py_data = { 'ir_engine': self, 'ireq_index': ireq_index, 'request': request, 'start_time': 
datetime.datetime.now() } self.exec_net.requests[ireq_index].set_completion_callback( py_callback=inference_callback, py_data=py_data) self.exec_net.requests[ireq_index].async_infer( request.inference_input) logger.debug("Stopping inference service for model {} version {}" .format(self.model_name, self.model_version)) def stop_inference_service(self): self.engine_active = False self.inference_thread.join() def suppress_inference(self): # Wait for all inferences executed on deleted engines to end logger.debug("[Model: {} version: {}] --- Waiting for in progress " "inferences to finish...". format(self.model_name, self.model_version)) engine_suppressed = False while not engine_suppressed: if self.free_ireq_index_queue.full(): engine_suppressed = True logger.debug("[Model: {} version: {}] --- In progress inferences " "has been finalized...". format(self.model_name, self.model_version)) def adjust_network_inputs_if_needed(self, inference_input): error_message = None reshape_param = self.detect_shapes_incompatibility( inference_input) if reshape_param is not None: self.suppress_inference() error_message = self.reshape(reshape_param) return error_message def detect_shapes_incompatibility(self, inference_input): # Compares workload shapes with engine inputs shapes. Returns # reshape_param # reshape_param is inputs shapes dictionary (input_name:shape pairs) # for reshapable models and batch size for non-reshapable. If no # changes needed - reshape_param is None reshape_param = None inputs_shapes = self.scan_input_shapes( inference_input) if inputs_shapes: reshape_param = inputs_shapes # For non-reshapable models, batch_size of first input is the # reshape parameter if self.shape_info.mode == ShapeMode.DISABLED: input_shape = inputs_shapes[list(inputs_shapes.keys())[0]] batch_size = list(input_shape)[0] reshape_param = batch_size return reshape_param def scan_input_shapes(self, data: dict): # Takes dictionary of input_name:numpy_array pairs. # returns dict of input_name:shape pairs with shapes different from # current inputs shapes in a network - empty dict if inference # workload and network inputs shapes are equal. changed_input_shapes = {} for input_name, input_data in data.items(): net_input_shape = tuple(self.net.inputs[input_name].shape) if net_input_shape != input_data.shape: changed_input_shapes[input_name] = input_data.shape logger.debug("[Model: {}, version: {}] --- Shape change " "required for input: {}. Current " "shape: {}. 
Required shape: {}" .format(self.model_name, self.model_version, input_name, net_input_shape, input_data.shape)) return changed_input_shapes def reshape(self, reshape_param): reshape_start_time = datetime.datetime.now() if type(reshape_param) is dict: error_message = self._reshape(reshape_param) elif type(reshape_param) is int: error_message = self._change_batch_size(reshape_param) else: error_message = "Unknown error occurred in input " \ "reshape preparation" reshape_end_time = datetime.datetime.now() if error_message is not None: logger.debug("[Model: {}, version: {}] --- {}".format( self.model_name, self.model_version, error_message)) return error_message duration = \ (reshape_end_time - reshape_start_time).total_seconds() * 1000 logger.debug( "IR_ENGINE; network reshape completed; {}; {}; {}ms".format( self.model_name, self.model_version, duration)) return None def _reshape(self, inputs_shapes: dict): # Takes dictionary of input_name:shape pairs as parameter # (obtained from scan_input_shapes method) # Returns error message on error and None if operation succeeded logger.debug("[Model: {}, version: {}] --- Reshaping " "network...".format(self.model_name, self.model_version)) message = None try: self.net.reshape(inputs_shapes) except Exception as e: message = "Error occurred while reshaping: {}".format(str(e)) logger.debug("[Model: {}, version: {}] --- {}".format( self.model_name, self.model_version, message)) return message logger.debug("[Model: {}, version: {}] --- Reshaped successfully". format(self.model_name, self.model_version)) logger.debug("[Model: {}, version: {}] --- Loading network...". format(self.model_name, self.model_version)) try: self.exec_net = self.plugin.load(network=self.net, num_requests=self.num_ireq, config=self.plugin_config) except Exception as e: message = "Error occurred while loading network: {}".format( str(e)) logger.debug("[Model: {}, version: {}] --- {}".format( self.model_name, self.model_version, message)) return message logger.debug("[Model: {}, version: {}] --- Network loaded " "successfully".format(self.model_name, self.model_version)) return message def _change_batch_size(self, batch_size: int): # Takes load batch size as a parameter. Used to change input batch # size in non-reshapable models logger.debug("[Model: {}, version: {}] --- Changing batch size. " "Loading network...".format(self.model_name, self.model_version)) message = None old_batch_size = self.net.batch_size self.net.batch_size = batch_size try: self.exec_net = self.plugin.load(network=self.net, num_requests=self.num_ireq, config=self.plugin_config) except Exception as e: message = "Error occurred while loading network: {}".format( str(e)) logger.debug("[Model: {}, version: {}] --- {}".format( self.model_name, self.model_version, message)) self.net.batch_size = old_batch_size return message logger.debug("[Model: {}, version: {}] --- Network loaded " "successfully. Batch size changed.". format(self.model_name, self.model_version)) return message
dmock.py
# ============================================================================= # # Copyright (c) 2016, Cisco Systems # All rights reserved. # # # Author: Klaudiusz Staniek # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF # THE POSSIBILITY OF SUCH DAMAGE. # ============================================================================= from telnetsrv.threaded import TelnetHandler, command import SocketServer import os class TelnetServer(SocketServer.TCPServer): allow_reuse_address = True class DeviceHandler(TelnetHandler): """Generic device handler""" # Dictionary to alter the command output provided in the text file. Usefull for test cases where similating # specific device responses is needed, i.e.: # response_dict = { # 'show_install_request': "10.77.132.127: Permission denied" # } response_dict = {} # Dictionary containing the method names from the Handler to be executed before and after specific command, i.e.: # action_dict = { # 'show_install_request': {'AFTER': 'disconnect'} # } action_dict = {} authNeedUser = True authNeedPass = True USERNAME = "admin" PASSWORD = "admin" AUTH_MESSAGE = "User Access Verification\n" PROMPT_USER = "Username: " PROMPT_PASS = "Password: " PROMPT = "IOS#" WELCOME = "\n" #GOODBYE = "Connection closed by foreign host." 
GOODBYE = None def authCallback(self, username, password): if self.authNeedUser: if username != self.USERNAME: raise Exception() if self.authNeedPass: if password != self.PASSWORD: raise Exception() return True # if username != self.USERNAME or password != self.PASSWORD: # raise Exception() # return True def get_response(self, command_line): response = self.response_dict.get(command_line, None) if response is None: directory = os.path.dirname(os.path.realpath(__file__)) filename = os.path.join(directory, self.platform.lower(), command_line + ".txt") try: with open(filename) as f: response = f.read() except IOError: response = None return response def get_action(self, command_line, when): action_name = None data = self.action_dict.get(command_line, None) if data: action_name = data.get(when, None) return action_name def cmd(self, params): """Default command handler""" params.insert(0, self.input.cmd) command_line = "_".join(params) response = self.get_response(command_line) action_name = self.get_action(command_line, 'BEFORE') if action_name: action = getattr(self, action_name, None) if action: action() if response: self.writeresponse(response) action_name = self.get_action(command_line, 'AFTER') if action_name: action = getattr(self, action_name, None) if action: action() @command('terminal') def terminal(self, params): """ Just accept the terminal command. No action. """ pass @command('wrongcommand') def wrongcommand(self, params): self.writeresponse(self.WRONGCOMMAND) @command('exit') def exit(self, params): self.RUNSHELL = False if self.GOODBYE: self.writeline(self.GOODBYE) def authentication_ok(self): """Checks the authentication and sets the username of the currently connected terminal. Returns True or False """ username = None password = None for _ in range(3): self.writeline(self.AUTH_MESSAGE) if self.authCallback: if self.authNeedUser: username = self.readline(prompt=self.PROMPT_USER, use_history=False) if username == 'QUIT': self.RUNSHELL = False return True if self.authNeedPass: password = self.readline(echo=False, prompt=self.PROMPT_PASS, use_history=False) if password == 'QUIT': self.RUNSHELL = False return True if self.DOECHO: pass #self.write("\n") try: self.authCallback(username, password) except Exception: self.username = None continue else: # Successful authentication self.username = username return True else: # No authentication desired self.username = None return True self.writeresponse(self.AUTH_FAILED_MESSAGE) return False class XRHandler(DeviceHandler): """Generic IOSXR handler""" PROMPT = "RP/0/RP0/CPU0:ios#" TELNET_ISSUE = "\nUser Access Verification\n" # does not work AUTH_MESSAGE = "\nUser Access Verification\n" AUTH_FAILED_MESSAGE = "\n% Authentication failed" PROMPT_USER = "Username: " PROMPT_PASS = "Password: " WRONGCOMMAND = """ ^ % Invalid input detected at '^' marker.""" response_dict = {"wrongcommand": WRONGCOMMAND} @command(['show', 'admin']) def show_admin(self, params): self.cmd(params=params) class ASR9KHandler(XRHandler): """ Standard ASR9000 Handler """ platform = "ASR9K" class ASR9K64Handler(XRHandler): """ Standard ASR9000 64 bit Handler """ platform = "ASR9K-64" class NCS1KHandler(XRHandler): platform = "NCS1K" response_dict = { "show_install_request": "10.77.132.127: Permission denied"} action_dict = { "show_install_request": {"AFTER": "disconnect"} } def disconnect(self): self.RUNSHELL = False class NCS5500Handler(XRHandler): platform = "NCS5500" class NXOSHandler(DeviceHandler): WELCOME = """Cisco Nexus Operating System (NX-OS) Software TAC 
support: http://www.cisco.com/tac Copyright (C) 2002-2016, Cisco and/or its affiliates. All rights reserved. The copyrights to certain works contained in this software are owned by other third parties and used and distributed under their own licenses, such as open source. This software is provided "as is," and unless otherwise stated, there is no warranty, express or implied, including but not limited to warranties of merchantability and fitness for a particular purpose. Certain components of this software are licensed under the GNU General Public License (GPL) version 2.0 or GNU General Public License (GPL) version 3.0 or the GNU Lesser General Public License (LGPL) Version 2.1 or Lesser General Public License (LGPL) Version 2.0. A copy of each such license is available at http://www.opensource.org/licenses/gpl-2.0.php and http://opensource.org/licenses/gpl-3.0.html and http://www.opensource.org/licenses/lgpl-2.1.php and http://www.gnu.org/licenses/old-licenses/library.txt.""" PROMPT = "switch#" TELNET_ISSUE = "\nUser Access Verification" # does not work AUTH_MESSAGE = "\nUser Access Verification" AUTH_FAILED_MESSAGE = "\n% Authentication failed" PROMPT_USER = "switch login: " PROMPT_PASS = "Password: " WRONGCOMMAND = """ ^ % Invalid command at '^' marker.""" @command('show') def show(self, params): self.cmd(params=params) class NX9KHandler(NXOSHandler): platform = "N9K" class SunHandler(DeviceHandler): PROMPT = "TEST " # Intentionally weird prompt WELCOME = "Last login: Wed Jul 27 00:44:30 from localhost" authNeedUser = True authNeedPass = True response_dict = {} action_dict = {} USERNAME = "admin" PASSWORD = "admin" PROMPT_USER = "login:" PROMPT_PASS = "This is your AD password:" def authCallback(self, username, password): if password != self.PASSWORD: raise Exception() return True def authentication_ok(self): username = None password = None for _ in range(1): if self.authCallback: if self.authNeedPass: password = self.readline(echo=False, prompt=self.PROMPT_PASS, use_history=False) if password == 'QUIT': self.RUNSHELL = False return True if self.DOECHO: self.write("\n") try: self.authCallback(None, password) except Exception: self.username = None continue else: # Successful authentication self.username = username return True else: # No authentication desired self.username = None return True self.writeresponse("Login incorrect") return False @command('hostname') def hostname(self, params): self.writeresponse("sunserver") @command('uname') def uname(self, params): self.writeresponse('SunOS 5.10') @command('telnet') def telnet(self, params): self.writeresponse("""Trying host1... 
telnet: connect to address 155.156.1.3: Operation timed out""") # self.RUNSHELL = False class IOSXEHandler(DeviceHandler): AUTH_MESSAGE = "\n\nUser Access Verification\n" PROMPT_PASS = "Password: Kerberos: No default realm defined for Kerberos!\n" PROMPT_PASS_E = "Password: " AUTH_FAILED_MESSAGE = "% Bad passwords\n" ENABLE_FAILED_MESSAGE = "% Bad secrets\n" PROMPT = "IOS#" WELCOME = "" ENABLE_PASSWORD = "admin" WRONGCOMMAND = "% Bad IP address or host name% Unknown command or computer name, or unable to find computer address" @command(['show']) def show(self, params): self.cmd(params=params) @command(['enable', 'en']) def enable(self, params): for _ in range(3): password = self.readline(echo=False, prompt=self.PROMPT_PASS_E, use_history=False) if password == 'QUIT': self.RUNSHELL = False return True if self.DOECHO: self.write("\n") if password == self.ENABLE_PASSWORD: self.PROMPT = self.PROMPT[:-1] + "#" break else: self.writeresponse(self.ENABLE_FAILED_MESSAGE) @command(['disable']) def disable(self, params): self.PROMPT = self.PROMPT[:-1] + ">" class ASR920Handler(IOSXEHandler): platform = "ASR920" authNeedUser = False PROMPT = "CSG-5502-ASR920>" class ASR903Handler(IOSXEHandler): platform = "ASR903" authNeedUser = False PROMPT = "PAN-5205-ASR903>" class ASR901Handler(IOSXEHandler): platform = "ASR901" authNeedUser = False PROMPT = "CSG-1202-ASR901>" if __name__ == '__main__': from threading import Thread server = TelnetServer(("127.0.0.1", 10025), ASR9KHandler) server_thread = Thread(target=server.serve_forever) server_thread.daemon = True server_thread.start() raw_input("Press Enter to continue...") server.shutdown() server.server_close() server_thread.join()
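Exercising the mock end to end, with Python 2's standard telnetlib (the module itself is Python 2: SocketServer, raw_input) and the same port 10025 as the __main__ block; the response text would come from the asr9k/show_install_request.txt file next to the module:

# Hypothetical client session against the mock above (Python 2).
import telnetlib

tn = telnetlib.Telnet('127.0.0.1', 10025)
tn.read_until('Username: ')
tn.write('admin\n')
tn.read_until('Password: ')
tn.write('admin\n')
tn.read_until('RP/0/RP0/CPU0:ios#')   # the XRHandler prompt
tn.write('show install request\n')    # answered from <platform>/show_install_request.txt
print tn.read_until('#')
tn.write('exit\n')
tn.close()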
labels.py
import hashlib import requests import threading import json import sys import traceback import aes import base64 import electrum_sib from electrum_sib.plugins import BasePlugin, hook from electrum_sib.i18n import _ class LabelsPlugin(BasePlugin): def __init__(self, parent, config, name): BasePlugin.__init__(self, parent, config, name) self.target_host = 'sync.bytesized-hosting.com:9090' self.wallets = {} def encode(self, wallet, msg): password, iv, wallet_id = self.wallets[wallet] encrypted = electrum_sib.bitcoin.aes_encrypt_with_iv(password, iv, msg.encode('utf8')) return base64.b64encode(encrypted) def decode(self, wallet, message): password, iv, wallet_id = self.wallets[wallet] decoded = base64.b64decode(message) decrypted = electrum_sib.bitcoin.aes_decrypt_with_iv(password, iv, decoded) return decrypted.decode('utf8') def get_nonce(self, wallet): # nonce is the nonce to be used with the next change nonce = wallet.storage.get('wallet_nonce') if nonce is None: nonce = 1 self.set_nonce(wallet, nonce) return nonce def set_nonce(self, wallet, nonce): self.print_error("set", wallet.basename(), "nonce to", nonce) wallet.storage.put("wallet_nonce", nonce) @hook def set_label(self, wallet, item, label): if not wallet in self.wallets: return nonce = self.get_nonce(wallet) wallet_id = self.wallets[wallet][2] bundle = {"walletId": wallet_id, "walletNonce": nonce, "externalId": self.encode(wallet, item), "encryptedLabel": self.encode(wallet, label)} t = threading.Thread(target=self.do_request, args=["POST", "/label", False, bundle]) t.setDaemon(True) t.start() # Caller will write the wallet self.set_nonce(wallet, nonce + 1) def do_request(self, method, url = "/labels", is_batch=False, data=None): url = 'https://' + self.target_host + url kwargs = {'headers': {}} if method == 'GET' and data: kwargs['params'] = data elif method == 'POST' and data: kwargs['data'] = json.dumps(data) kwargs['headers']['Content-Type'] = 'application/json' response = requests.request(method, url, **kwargs) if response.status_code != 200: raise BaseException(response.status_code, response.text) response = response.json() if "error" in response: raise BaseException(response["error"]) return response def push_thread(self, wallet): wallet_id = self.wallets[wallet][2] bundle = {"labels": [], "walletId": wallet_id, "walletNonce": self.get_nonce(wallet)} for key, value in wallet.labels.iteritems(): try: encoded_key = self.encode(wallet, key) encoded_value = self.encode(wallet, value) except: self.print_error('cannot encode', repr(key), repr(value)) continue bundle["labels"].append({'encryptedLabel': encoded_value, 'externalId': encoded_key}) self.do_request("POST", "/labels", True, bundle) def pull_thread(self, wallet, force): wallet_id = self.wallets[wallet][2] nonce = 1 if force else self.get_nonce(wallet) - 1 self.print_error("asking for labels since nonce", nonce) try: response = self.do_request("GET", ("/labels/since/%d/for/%s" % (nonce, wallet_id) )) if response["labels"] is None: self.print_error('no new labels') return result = {} for label in response["labels"]: try: key = self.decode(wallet, label["externalId"]) value = self.decode(wallet, label["encryptedLabel"]) except: continue try: json.dumps(key) json.dumps(value) except: self.print_error('error: no json', key) continue result[key] = value for key, value in result.items(): if force or not wallet.labels.get(key): wallet.labels[key] = value self.print_error("received %d labels" % len(response)) # do not write to disk because we're in a daemon thread 
wallet.storage.put('labels', wallet.labels) self.set_nonce(wallet, response["nonce"] + 1) self.on_pulled(wallet) except Exception as e: traceback.print_exc(file=sys.stderr) self.print_error("could not retrieve labels") def start_wallet(self, wallet): nonce = self.get_nonce(wallet) self.print_error("wallet", wallet.basename(), "nonce is", nonce) mpk = wallet.get_fingerprint() if not mpk: return password = hashlib.sha1(mpk).digest().encode('hex')[:32] iv = hashlib.sha256(password).digest()[:16] wallet_id = hashlib.sha256(mpk).digest().encode('hex') self.wallets[wallet] = (password, iv, wallet_id) # If there is an auth token we can try to actually start syncing t = threading.Thread(target=self.pull_thread, args=(wallet, False)) t.setDaemon(True) t.start() def stop_wallet(self, wallet): self.wallets.pop(wallet, None)
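The per-wallet credentials set up in start_wallet derive everything from the wallet's master public key fingerprint; the same derivation as a standalone Python 2 sketch (Python 2, since the plugin relies on str.encode('hex') and dict.iteritems; the mpk value is a dummy):

# Standalone sketch of the credential derivation in start_wallet() (Python 2).
import hashlib

mpk = 'dummy-master-public-key'                            # placeholder fingerprint
password = hashlib.sha1(mpk).digest().encode('hex')[:32]   # 32 hex chars = 16 bytes
iv = hashlib.sha256(password).digest()[:16]                # AES block-sized IV
wallet_id = hashlib.sha256(mpk).digest().encode('hex')     # identifier sent to the server
print wallet_id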
uiautomation.py
# coding=utf-8
__author__ = 'lxn3032'


import os
import requests
import time
import warnings
import threading
import atexit

from airtest.core.api import connect_device, device as current_device
from airtest.core.android.ime import YosemiteIme

from hrpc.client import RpcClient
from hrpc.transport.http import HttpTransport
from poco.pocofw import Poco
from poco.agent import PocoAgent
from poco.sdk.Attributor import Attributor
from poco.sdk.interfaces.screen import ScreenInterface
from poco.utils.hrpc.hierarchy import RemotePocoHierarchy
from poco.utils.airtest.input import AirtestInput
from poco.utils import six

from poco.drivers.android.utils.installation import install, uninstall

__all__ = ['AndroidUiautomationPoco', 'AndroidUiautomationHelper']
this_dir = os.path.dirname(os.path.realpath(__file__))
PocoServicePackage = 'com.netease.open.pocoservice'
PocoServicePackageTest = 'com.netease.open.pocoservice.test'


class AndroidRpcClient(RpcClient):
    def __init__(self, endpoint):
        self.endpoint = endpoint
        super(AndroidRpcClient, self).__init__(HttpTransport)

    def initialize_transport(self):
        return HttpTransport(self.endpoint, self)


# deprecated
class AttributorWrapper(Attributor):
    """
    Some devices still do not support Accessibility.ACTION_SET_TEXT, so using
    YosemiteIme remains the most compatible solution. This class hooks set_text
    and delegates to the IME's text method instead.
    """

    def __init__(self, remote, ime):
        self.remote = remote
        self.ime = ime

    def getAttr(self, node, attrName):
        return self.remote.getAttr(node, attrName)

    def setAttr(self, node, attrName, attrVal):
        if attrName == 'text' and attrVal != '':
            # Clear the field first, then type the new value. Not as good as
            # setting it through the IME directly, but good enough.
            current_val = self.remote.getAttr(node, 'text')
            if current_val:
                self.remote.setAttr(node, 'text', '')
            self.ime.text(attrVal)
        else:
            self.remote.setAttr(node, attrName, attrVal)


class ScreenWrapper(ScreenInterface):
    def __init__(self, screen):
        super(ScreenWrapper, self).__init__()
        self.screen = screen

    def getScreen(self, width):
        # On Android the PocoService implementation returns only a b64-encoded
        # image, always in jpg format.
        b64img = self.screen.getScreen(width)
        return b64img, 'jpg'

    def getPortSize(self):
        return self.screen.getPortSize()


class AndroidPocoAgent(PocoAgent):
    def __init__(self, endpoint, ime, use_airtest_input=False):
        self.client = AndroidRpcClient(endpoint)
        remote_poco = self.client.remote('poco-uiautomation-framework')
        dumper = remote_poco.dumper
        selector = remote_poco.selector
        attributor = remote_poco.attributor
        hierarchy = RemotePocoHierarchy(dumper, selector, attributor)

        if use_airtest_input:
            inputer = AirtestInput()
        else:
            inputer = remote_poco.inputer
        super(AndroidPocoAgent, self).__init__(hierarchy, inputer, ScreenWrapper(remote_poco.screen), None)

    def on_bind_driver(self, driver):
        super(AndroidPocoAgent, self).on_bind_driver(driver)
        if isinstance(self.input, AirtestInput):
            self.input.add_preaction_cb(driver)


class AndroidUiautomationPoco(Poco):
    """
    Poco Android implementation for testing **Android native apps**.

    Args:
        device (:py:obj:`Device`): :py:obj:`airtest.core.device.Device` instance provided by ``airtest``. Leave the
            parameter at its default and the default device will be chosen; more details in the ``airtest doc``
        using_proxy (:py:obj:`bool`): whether use adb forward to connect the Android device or not
        force_restart (:py:obj:`bool`): whether always restart the poco-service-demo running on Android device or not
        options: see :py:class:`poco.pocofw.Poco`

    Examples:
        The simplest way to initialize an AndroidUiautomationPoco instance, regardless of your device's network
        status::

            from poco.drivers.android.uiautomation import AndroidUiautomationPoco

            poco = AndroidUiautomationPoco()
            poco('android:id/title').click()
            ...
    """

    def __init__(self, device=None, using_proxy=True, force_restart=False, use_airtest_input=False, **options):
        # This option exists so that the newest pocounit workflow does not take
        # a screenshot on every single step.
        self.screenshot_each_action = True
        if options.get('screenshot_each_action') is False:
            self.screenshot_each_action = False

        self.device = device or current_device()
        if not self.device:
            self.device = connect_device("Android:///")

        self.adb_client = self.device.adb
        if using_proxy:
            self.device_ip = self.adb_client.host or "127.0.0.1"
        else:
            self.device_ip = self.device.get_ip_address()

        # save current top activity (@nullable)
        current_top_activity_package = self.device.get_top_activity_name()
        if current_top_activity_package is not None:
            current_top_activity_package = current_top_activity_package.split('/')[0]

        # install ime
        self.ime = YosemiteIme(self.adb_client)
        self.ime.start()

        # install
        self._instrument_proc = None
        self._install_service()

        # forward
        if using_proxy:
            p0, _ = self.adb_client.setup_forward("tcp:10080")
            p1, _ = self.adb_client.setup_forward("tcp:10081")
        else:
            p0 = 10080
            p1 = 10081

        # start
        if self._is_running('com.github.uiautomator'):
            warnings.warn('{} should not run together with "uiautomator". "uiautomator" will be killed.'
                          .format(self.__class__.__name__))
            self.adb_client.shell(['am', 'force-stop', 'com.github.uiautomator'])

        ready = self._start_instrument(p0, force_restart=force_restart)
        if not ready:
            # If startup failed, the service has to be uninstalled and started
            # again -- a quirk of instrumentation.
            uninstall(self.adb_client, PocoServicePackage)
            self._install_service()
            ready = self._start_instrument(p0)

        if current_top_activity_package is not None:
            current_top_activity2 = self.device.get_top_activity_name()
            if current_top_activity2 is None or current_top_activity_package not in current_top_activity2:
                self.device.start_app(current_top_activity_package, activity=True)

        if not ready:
            raise RuntimeError("unable to launch AndroidUiautomationPoco")
        if ready:
            # After the first successful start, watch the process state from a
            # background thread and keep it from exiting.
            self._keep_running_instrumentation(p0)

        endpoint = "http://{}:{}".format(self.device_ip, p1)
        agent = AndroidPocoAgent(endpoint, self.ime, use_airtest_input)
        super(AndroidUiautomationPoco, self).__init__(agent, **options)

    def _install_service(self):
        updated = install(self.adb_client, os.path.join(this_dir, 'lib', 'pocoservice-debug.apk'))
        install(self.adb_client, os.path.join(this_dir, 'lib', 'pocoservice-debug-androidTest.apk'), updated)
        return updated

    def _is_running(self, package_name):
        processes = self.adb_client.shell(['ps']).splitlines()
        for ps in processes:
            ps = ps.strip()
            if ps.endswith(package_name):
                return True
        return False

    def _keep_running_instrumentation(self, port_to_ping):
        print('[pocoservice.apk] background daemon started.')

        def loop():
            while True:
                if self._instrument_proc is not None:
                    stdout, stderr = self._instrument_proc.communicate()
                    print('[pocoservice.apk] stdout: {}'.format(stdout))
                    print('[pocoservice.apk] stderr: {}'.format(stderr))
                print('[pocoservice.apk] retrying instrumentation PocoService')
                self._start_instrument(port_to_ping)  # try to restart
                time.sleep(1)

        t = threading.Thread(target=loop)
        t.daemon = True
        t.start()

    def _start_instrument(self, port_to_ping, force_restart=False):
        if not force_restart:
            try:
                state = requests.get('http://{}:{}/uiautomation/connectionState'.format(self.device_ip, port_to_ping),
                                     timeout=10)
                state = state.json()
                if state.get('connected'):
                    # skip starting instrumentation if UiAutomation Service already connected.
                    return True
            except:
                pass

        if self._instrument_proc is not None:
            if self._instrument_proc.poll() is None:
                self._instrument_proc.kill()
            self._instrument_proc = None

        ready = False
        self.adb_client.shell(['am', 'force-stop', PocoServicePackage])

        # Launch the main TestActivity before starting the instrumentation,
        # otherwise the instrumentation may fail.
        self.adb_client.shell('am start -n {}/.TestActivity'.format(PocoServicePackage))

        instrumentation_cmd = [
            'am', 'instrument', '-w', '-e', 'debug', 'false',
            '-e', 'class', '{}.InstrumentedTestAsLauncher'.format(PocoServicePackage),
            '{}.test/android.support.test.runner.AndroidJUnitRunner'.format(PocoServicePackage)]
        self._instrument_proc = self.adb_client.start_shell(instrumentation_cmd)

        def cleanup_proc(proc):
            def wrapped():
                try:
                    proc.kill()
                except:
                    pass
            return wrapped
        atexit.register(cleanup_proc(self._instrument_proc))

        time.sleep(2)
        for i in range(10):
            try:
                requests.get('http://{}:{}'.format(self.device_ip, port_to_ping), timeout=10)
                ready = True
                break
            except requests.exceptions.Timeout:
                break
            except requests.exceptions.ConnectionError:
                if self._instrument_proc.poll() is not None:
                    warnings.warn("[pocoservice.apk] instrumentation test server process is no longer alive")
                    stdout = self._instrument_proc.stdout.read()
                    stderr = self._instrument_proc.stderr.read()
                    print('[pocoservice.apk] stdout: {}'.format(stdout))
                    print('[pocoservice.apk] stderr: {}'.format(stderr))
                time.sleep(1)
                print("still waiting for uiautomation ready.")
                continue
        return ready

    def on_pre_action(self, action, ui, args):
        if self.screenshot_each_action:
            # for the airtest log
            from airtest.core.api import snapshot
            msg = repr(ui)
            if not isinstance(msg, six.text_type):
                msg = msg.decode('utf-8')
            snapshot(msg=msg)


class AndroidUiautomationHelper(object):
    _nuis = {}

    @classmethod
    def get_instance(cls, device):
        """
        This is only a slot to store and retrieve an already initialized poco instance, rather than initializing it
        again. You can simply pass the ``current device instance`` provided by ``airtest`` to get the
        AndroidUiautomationPoco instance. If there is no such AndroidUiautomationPoco instance yet, a new one will be
        created and stored.

        Args:
            device (:py:obj:`airtest.core.device.Device`): more details refer to ``airtest doc``

        Returns:
            poco instance
        """

        if cls._nuis.get(device) is None:
            cls._nuis[device] = AndroidUiautomationPoco(device)
        return cls._nuis[device]
common.py
# -*- coding: utf-8 -*- """ Classes used both by front-end and back-end """ import os.path import platform import site import sys import tokenize from collections import namedtuple from typing import List, Optional # @UnusedImport import subprocess import logging import traceback from threading import Thread MESSAGE_MARKER = "\x02" ValueInfo = namedtuple("ValueInfo", ["id", "repr"]) FrameInfo = namedtuple( "FrameInfo", [ "id", "filename", "module_name", "code_name", "source", "lineno", "firstlineno", "in_library", "locals", "globals", "freevars", "event", "focus", "node_tags", "current_statement", "current_root_expression", "current_evaluations", ], ) TextRange = namedtuple("TextRange", ["lineno", "col_offset", "end_lineno", "end_col_offset"]) class Record: def __init__(self, **kw): self.__dict__.update(kw) def update(self, e, **kw): self.__dict__.update(e, **kw) def setdefault(self, **kw): "updates those fields that are not yet present (similar to dict.setdefault)" for key in kw: if not hasattr(self, key): setattr(self, key, kw[key]) def get(self, key, default=None): return self.__dict__.get(key, default) def __getitem__(self, key): return self.__dict__[key] def __delitem__(self, key): self.__dict__.__delitem__(key) def __setitem__(self, key, value): self.__dict__[key] = value def __contains__(self, key): return key in self.__dict__ def __repr__(self): keys = self.__dict__.keys() items = ("{}={}".format(k, repr(self.__dict__[k])) for k in keys) return "{}({})".format(self.__class__.__name__, ", ".join(items)) def __str__(self): keys = sorted(self.__dict__.keys()) items = ("{}={}".format(k, repr(self.__dict__[k])) for k in keys) return "{}({})".format(self.__class__.__name__, ", ".join(items)) def __eq__(self, other): # pylint: disable=unidiomatic-typecheck if type(self) != type(other): return False if len(self.__dict__) != len(other.__dict__): return False for key in self.__dict__: if not hasattr(other, key): return False self_value = getattr(self, key) other_value = getattr(other, key) if type(self_value) != type(other_value) or self_value != other_value: return False return True def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return hash(repr(self)) def range_contains_smaller(one: TextRange, other: TextRange) -> bool: this_start = (one.lineno, one.col_offset) this_end = (one.end_lineno, one.end_col_offset) other_start = (other.lineno, other.col_offset) other_end = (other.end_lineno, other.end_col_offset) return ( this_start < other_start and this_end > other_end or this_start == other_start and this_end > other_end or this_start < other_start and this_end == other_end ) def range_contains_smaller_or_equal(one: TextRange, other: TextRange) -> bool: return range_contains_smaller(one, other) or one == other class InputSubmission(Record): """For sending data to backend's stdin""" def __init__(self, data: str, **kw) -> None: super().__init__(**kw) self.data = data class CommandToBackend(Record): """Command meant for the back-end""" def __init__(self, name: str, **kw) -> None: super().__init__(**kw) self.name = name class InterruptCommand(CommandToBackend): def __init__(self, **kw) -> None: if "name" in kw: del kw["name"] super().__init__("interrupt", **kw) class EOFCommand(CommandToBackend): def __init__(self, **kw) -> None: if "name" in kw: del kw["name"] super().__init__("eof", **kw) class ToplevelCommand(CommandToBackend): def __init__(self, name: str, argv: List[str] = [], **kw) -> None: super().__init__(name, **kw) self.argv = argv class 
DebuggerCommand(CommandToBackend): pass class InlineCommand(CommandToBackend): """ Can be used both during debugging and in waiting_toplevel_command state (eg. for sending variable and heap info requests) """ pass class MessageFromBackend(Record): def __init__(self, **kw) -> None: self.event_type = type(self).__name__ # allow event_type to be overridden by kw super().__init__(**kw) if not hasattr(self, "sequence"): self.sequence = self.event_type class ToplevelResponse(MessageFromBackend): pass class DebuggerResponse(MessageFromBackend): pass class BackendEvent(MessageFromBackend): def __init__(self, event_type: str, **kw) -> None: super().__init__(**kw) self.event_type = event_type class InlineResponse(MessageFromBackend): def __init__(self, command_name: str, **kw) -> None: super().__init__(**kw) self.command_name = command_name self.event_type = self.command_name + "_response" def serialize_message(msg: Record) -> str: # I want to transfer only ASCII chars because encodings are not reliable # (eg. can't find a way to specify PYTHONIOENCODING for cx_freeze'd program) return MESSAGE_MARKER + repr(msg).encode("UTF-7").decode("ASCII") def parse_message(msg_string: str) -> Record: # DataFrames may have nan # pylint: disable=unused-variable nan = float("nan") # @UnusedVariable assert msg_string[0] == MESSAGE_MARKER return eval(msg_string[1:].encode("ASCII").decode("UTF-7")) def normpath_with_actual_case(name: str) -> str: """In Windows return the path with the case it is stored in the filesystem""" assert os.path.isabs(name) or os.path.ismount(name), "Not abs nor mount: " + name assert os.path.exists(name), "Not exists: " + name if os.name == "nt": # https://stackoverflow.com/questions/2113822/python-getting-filename-case-as-stored-in-windows/2114975 name = os.path.normpath(name) from ctypes import create_unicode_buffer, windll buf = create_unicode_buffer(512) # GetLongPathNameW alone doesn't fix filename part windll.kernel32.GetShortPathNameW(name, buf, 512) # @UndefinedVariable windll.kernel32.GetLongPathNameW(buf.value, buf, 512) # @UndefinedVariable result = buf.value if result.casefold() != name.casefold(): # Sometimes GetShortPathNameW + GetLongPathNameW doesn't work # see eg. https://github.com/thonny/thonny/issues/925 windll.kernel32.GetLongPathNameW(name, buf, 512) # @UndefinedVariable result = buf.value if result.casefold() != name.casefold(): result = name if result[1] == ":": # ensure drive letter is capital return result[0].upper() + result[1:] else: return result else: # easy on Linux # too difficult on mac # https://stackoverflow.com/questions/14515073/in-python-on-osx-with-hfs-how-can-i-get-the-correct-case-of-an-existing-filenam # Hopefully only correct case comes into Thonny (eg. 
via open dialog) return os.path.normpath(name) def is_same_path(name1: str, name2: str) -> bool: return os.path.normpath(os.path.normcase(name1)) == os.path.normpath(os.path.normcase(name2)) def path_startswith(child_name: str, dir_name: str) -> bool: normchild = os.path.normpath(os.path.normcase(child_name)) normdir = os.path.normpath(os.path.normcase(dir_name)) return normdir == normchild or normchild.startswith(normdir.rstrip(os.path.sep) + os.path.sep) def read_source(filename): with tokenize.open(filename) as fp: return fp.read() def get_exe_dirs(): result = [] if site.ENABLE_USER_SITE: if platform.system() == "Windows": if site.getusersitepackages(): result.append(site.getusersitepackages().replace("site-packages", "Scripts")) else: if site.getuserbase(): result.append(site.getuserbase() + "/bin") main_scripts = os.path.join(sys.prefix, "Scripts") if os.path.isdir(main_scripts) and main_scripts not in result: result.append(main_scripts) if os.path.dirname(sys.executable) not in result: result.append(os.path.dirname(sys.executable)) return result def get_site_dir(symbolic_name, executable=None): if not executable or executable == sys.executable: result = getattr(site, symbolic_name, "") else: result = ( subprocess.check_output( [executable, "-m", "site", "--" + symbolic_name.lower().replace("_", "-")], universal_newlines=True, ) .decode() .strip() ) return result if result else None def get_base_executable(): if sys.exec_prefix == sys.base_exec_prefix: return sys.executable if platform.system() == "Windows": result = sys.base_exec_prefix + "\\" + os.path.basename(sys.executable) result = normpath_with_actual_case(result) else: result = sys.executable.replace(sys.exec_prefix, sys.base_exec_prefix) if not os.path.isfile(result): raise RuntimeError("Can't locate base executable") return result def get_augmented_system_path(extra_dirs): path_items = os.environ.get("PATH", "").split(os.pathsep) for d in reversed(extra_dirs): if d not in path_items: path_items.insert(0, d) return os.pathsep.join(path_items) def update_system_path(env, value): # in Windows, env keys are not case sensitive # this is important if env is a dict (not os.environ) if platform.system() == "Windows": found = False for key in env: if key.upper() == "PATH": found = True env[key] = value if not found: env["PATH"] = value else: env["PATH"] = value class UserError(RuntimeError): """Errors of this class are meant to be presented without stacktrace""" pass def is_hidden_or_system_file(path: str) -> bool: if os.path.basename(path).startswith("."): return True elif platform.system() == "Windows": from ctypes import windll FILE_ATTRIBUTE_HIDDEN = 0x2 FILE_ATTRIBUTE_SYSTEM = 0x4 return bool( windll.kernel32.GetFileAttributesW(path) # @UndefinedVariable & (FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_SYSTEM) ) else: return False def get_dirs_child_data(paths): """Used for populating local file browser's tree view. dir_paths contains full paths of the open directories. 
Returns information required for refreshing this view""" res = {} for path in paths: # assuming the path already has actual case res[path] = get_single_dir_child_data(path) return res def get_single_dir_child_data(path): if path == "": if platform.system() == "Windows": return {**get_windows_volumes_info(), **get_windows_network_locations()} else: return get_single_dir_child_data("/") elif os.path.isdir(path) or os.path.ismount(path): result = {} try: for child in os.listdir(path): full_child_path = os.path.join(path, child) if not os.path.exists(full_child_path): # must be broken link continue full_child_path = normpath_with_actual_case(full_child_path) if not is_hidden_or_system_file(full_child_path): st = os.stat(full_child_path, dir_fd=None, follow_symlinks=True) name = os.path.basename(full_child_path) result[name] = { "size": None if os.path.isdir(full_child_path) else st.st_size, "time": max(st.st_mtime, st.st_ctime), } except PermissionError: result["<not accessible>"] = {"kind": "error", "size": -1, "time": None} return result else: return None def get_windows_volumes_info(): # http://stackoverflow.com/a/2288225/261181 # http://msdn.microsoft.com/en-us/library/windows/desktop/aa364939%28v=vs.85%29.aspx import string from ctypes import windll all_drive_types = [ "DRIVE_UNKNOWN", "DRIVE_NO_ROOT_DIR", "DRIVE_REMOVABLE", "DRIVE_FIXED", "DRIVE_REMOTE", "DRIVE_CDROM", "DRIVE_RAMDISK", ] required_drive_types = ["DRIVE_REMOVABLE", "DRIVE_FIXED", "DRIVE_REMOTE", "DRIVE_RAMDISK"] result = {} bitmask = windll.kernel32.GetLogicalDrives() # @UndefinedVariable for letter in string.ascii_uppercase: if not bitmask & 1: pass else: drive_type = all_drive_types[ windll.kernel32.GetDriveTypeW("%s:\\" % letter) ] # @UndefinedVariable # NB! Drive A can be present in bitmask but actually missing. # In this case querying information about it would freeze the UI # for several seconds. # One solution is to uninstall the device in device manager, # but OS may restore the drive later. 
# Therefore it is safest to skip A drive (user can access it via Open dialog) if drive_type in required_drive_types and ( letter != "A" or drive_type != "DRIVE_REMOVABLE" ): drive = letter + ":" path = drive + "\\" try: st = os.stat(path) volume_name = get_windows_volume_name(path) if not volume_name: volume_name = "Disk" label = volume_name + " (" + drive + ")" result[path] = { "label": label, "size": None, "time": max(st.st_mtime, st.st_ctime), } except PermissionError: traceback.print_exc() # probably an empty cardreader slot bitmask >>= 1 return result def get_windows_volume_name(path): # https://stackoverflow.com/a/12056414/261181 import ctypes kernel32 = ctypes.windll.kernel32 volume_name_buffer = ctypes.create_unicode_buffer(1024) file_system_name_buffer = ctypes.create_unicode_buffer(1024) serial_number = None max_component_length = None file_system_flags = None result = kernel32.GetVolumeInformationW( ctypes.c_wchar_p(path), volume_name_buffer, ctypes.sizeof(volume_name_buffer), serial_number, max_component_length, file_system_flags, file_system_name_buffer, ctypes.sizeof(file_system_name_buffer), ) if result: return volume_name_buffer.value else: return None def get_windows_network_locations(): import ctypes.wintypes CSIDL_NETHOOD = 0x13 SHGFP_TYPE_CURRENT = 0 buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH) ctypes.windll.shell32.SHGetFolderPathW(0, CSIDL_NETHOOD, 0, SHGFP_TYPE_CURRENT, buf) shortcuts_dir = buf.value result = {} for entry in os.scandir(shortcuts_dir): # full_path = normpath_with_actual_case(entry.path) lnk_path = os.path.join(entry.path, "target.lnk") if os.path.exists(lnk_path): try: target = get_windows_lnk_target(lnk_path) result[target] = { "label": entry.name + " (" + target + ")", "size": None, "time": None, } except: logging.getLogger("thonny").error( "Can't get target from %s", lnk_path, exc_info=True ) return result def get_windows_lnk_target(lnk_file_path): import thonny script_path = os.path.join(os.path.dirname(thonny.__file__), "res", "PrintLnkTarget.vbs") cmd = ["cscript", "/NoLogo", script_path, lnk_file_path] result = subprocess.check_output(cmd, universal_newlines=True, timeout=3) return result.strip() def execute_system_command(cmd, cwd=None, disconnect_stdin=False): env = dict(os.environ).copy() encoding = "utf-8" env["PYTHONIOENCODING"] = encoding # Make sure this python interpreter and its scripts are available # in PATH update_system_path(env, get_augmented_system_path(get_exe_dirs())) popen_kw = dict( stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=env, universal_newlines=True, bufsize=0, ) if cwd and os.path.isdir(cwd): popen_kw["cwd"] = cwd if disconnect_stdin: popen_kw["stdin"] = subprocess.DEVNULL if sys.version_info >= (3, 6): popen_kw["errors"] = "replace" popen_kw["encoding"] = encoding assert cmd.cmd_line.startswith("!") cmd_line = cmd.cmd_line[1:] proc = subprocess.Popen(cmd_line, **popen_kw) def copy_stream(source, target): while True: c = source.readline() if c == "": break else: target.write(c) target.flush() copy_out = Thread(target=lambda: copy_stream(proc.stdout, sys.stdout), daemon=True) copy_err = Thread(target=lambda: copy_stream(proc.stderr, sys.stderr), daemon=True) copy_out.start() copy_err.start() try: proc.wait() except KeyboardInterrupt as e: print(str(e), file=sys.stderr) copy_out.join() copy_err.join()
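# A minimal round-trip sketch for serialize_message/parse_message above. It
# assumes the Record base class defined earlier in this file provides the
# eval()-able repr these helpers rely on; the sample payload is illustrative
# only. Note how the UTF-7 step keeps the wire format pure ASCII even for
# non-ASCII user data.
if __name__ == "__main__":
    msg = ToplevelResponse(user_input="tere õhtust")
    wire = serialize_message(msg)
    wire.encode("ascii")  # would raise if any non-ASCII char leaked through
    echo = parse_message(wire)
    assert isinstance(echo, ToplevelResponse)
    assert echo.user_input == msg.user_input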
camera_pi.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # camera_pi.py # # # # Raspberry Pi camera module (developed by Miguel Grinberg) import time import io import threading import picamera class Camera(object): thread = None # background thread that reads frames from camera frame = None # current frame is stored here by background thread last_access = 0 # time of last client access to the camera def initialize(self): if Camera.thread is None: # start background frame thread Camera.thread = threading.Thread(target=self._thread) Camera.thread.start() # wait until frames start to be available while self.frame is None: time.sleep(0) def get_frame(self): Camera.last_access = time.time() self.initialize() return self.frame @classmethod def _thread(cls): with picamera.PiCamera() as camera: # camera setup camera.resolution = (640, 480) camera.framerate = 30 camera.hflip = False camera.vflip = False # let camera warm up camera.start_preview() time.sleep(2) stream = io.BytesIO() for foo in camera.capture_continuous(stream, 'jpeg', use_video_port=True): # store frame stream.seek(0) cls.frame = stream.read() # reset stream for next frame stream.seek(0) stream.truncate() # if there hasn't been any clients asking for frames in # the last 10 seconds stop the thread if time.time() - cls.last_access > 10: break cls.thread = None
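# A minimal sketch of how this Camera class is typically consumed from a web
# app: an MJPEG endpoint that streams the latest frame in a multipart
# response. Flask and the /video_feed route are assumptions for illustration;
# this would normally live in a separate app module that does
# `from camera_pi import Camera`.
if __name__ == '__main__':
    from flask import Flask, Response

    app = Flask(__name__)

    def gen(camera):
        # pull the freshest JPEG in a loop; get_frame() also refreshes
        # last_access, which keeps the background capture thread alive
        while True:
            frame = camera.get_frame()
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

    @app.route('/video_feed')
    def video_feed():
        return Response(gen(Camera()),
                        mimetype='multipart/x-mixed-replace; boundary=frame')

    app.run(host='0.0.0.0')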
gui.py
# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Tkinter gui for pylint"""
from __future__ import print_function

import os
import sys
import re
from threading import Thread

import six
from six.moves.tkinter import (
    Tk, Frame, Listbox, Entry, Label, Button, Scrollbar, Checkbutton,
    Radiobutton, IntVar, StringVar, PanedWindow, TOP, LEFT, RIGHT, BOTTOM,
    END, X, Y, BOTH, SUNKEN, W, HORIZONTAL, DISABLED, NORMAL,
)
from six.moves.tkinter_tkfiledialog import (
    askopenfilename,
    askdirectory,
)

import pylint.lint
from pylint.reporters.guireporter import GUIReporter

HOME = os.path.expanduser('~/')
HISTORY = '.pylint-gui-history'
COLORS = {'(I)':'green', '(C)':'blue', '(R)':'darkblue',
          '(W)':'black', '(E)':'darkred', '(F)':'red'}


def convert_to_string(msg):
    """make a string representation of a message"""
    module_object = msg.module
    if msg.obj:
        module_object += ".%s" % msg.obj
    return "(%s) %s [%d]: %s" % (msg.C, module_object, msg.line, msg.msg)


class BasicStream(object):
    '''
    used in gui reporter instead of writing to stdout, it is written to
    this stream and saved in contents
    '''
    def __init__(self, gui):
        """init"""
        self.curline = ""
        self.gui = gui
        self.contents = []
        self.outdict = {}
        self.currout = None
        self.next_title = None

    def write(self, text):
        """write text to the stream"""
        if re.match('^--+$', text.strip()) or re.match('^==+$', text.strip()):
            if self.currout:
                self.outdict[self.currout].remove(self.next_title)
                self.outdict[self.currout].pop()
            self.currout = self.next_title
            self.outdict[self.currout] = ['']

        if text.strip():
            self.next_title = text.strip()

        if text.startswith(os.linesep):
            self.contents.append('')
            if self.currout:
                self.outdict[self.currout].append('')
        self.contents[-1] += text.strip(os.linesep)
        if self.currout:
            self.outdict[self.currout][-1] += text.strip(os.linesep)

        if text.endswith(os.linesep) and text.strip():
            self.contents.append('')
            if self.currout:
                self.outdict[self.currout].append('')

    def fix_contents(self):
        """finalize what the contents of the dict should look like before output"""
        for item in self.outdict:
            num_empty = self.outdict[item].count('')
            for _ in range(num_empty):
                self.outdict[item].remove('')
            if self.outdict[item]:
                self.outdict[item].pop(0)

    def output_contents(self):
        """output contents of dict to the gui, and set the rating"""
        self.fix_contents()
        self.gui.tabs = self.outdict
        try:
            self.gui.rating.set(self.outdict['Global evaluation'][0])
        except KeyError:
            self.gui.rating.set('Error')
        self.gui.refresh_results_window()

        # reset stream variables for next run
        self.contents = []
        self.outdict = {}
        self.currout = None
        self.next_title = None


class LintGui(object):
    """Build and control a window to interact with pylint"""

    def __init__(self, root=None):
        """init"""
        self.root = root or Tk()
        self.root.title('Pylint')
        # reporter
        self.reporter = None
        # message queue for output from reporter
        self.msg_queue = six.moves.queue.Queue()
        self.msgs = []
        self.visible_msgs = []
        self.filenames = []
        self.rating = StringVar()
        self.tabs = {}
        self.report_stream = BasicStream(self)
        # gui objects
        self.lb_messages = None
        self.showhistory = None
        self.results = None
        self.btnRun = None
        self.information_box = None
        self.convention_box = None
        self.refactor_box = None
        self.warning_box = None
        self.error_box = None
        self.fatal_box = None
        self.txtModule = None
        self.status = None
        self.msg_type_dict = None
        self.init_gui()

    def init_gui(self):
        """init helper"""

        window = PanedWindow(self.root, orient="vertical")
        window.pack(side=TOP, fill=BOTH, expand=True)

        top_pane = Frame(window)
        window.add(top_pane)
        mid_pane = Frame(window)
        window.add(mid_pane)
        bottom_pane = Frame(window)
        window.add(bottom_pane)

        # setting up frames
        top_frame = Frame(top_pane)
        mid_frame = Frame(top_pane)
        history_frame = Frame(top_pane)
        radio_frame = Frame(mid_pane)
        rating_frame = Frame(mid_pane)
        res_frame = Frame(mid_pane)
        check_frame = Frame(bottom_pane)
        msg_frame = Frame(bottom_pane)
        btn_frame = Frame(bottom_pane)
        top_frame.pack(side=TOP, fill=X)
        mid_frame.pack(side=TOP, fill=X)
        history_frame.pack(side=TOP, fill=BOTH, expand=True)
        radio_frame.pack(side=TOP, fill=X)
        rating_frame.pack(side=TOP, fill=X)
        res_frame.pack(side=TOP, fill=BOTH, expand=True)
        check_frame.pack(side=TOP, fill=X)
        msg_frame.pack(side=TOP, fill=BOTH, expand=True)
        btn_frame.pack(side=TOP, fill=X)

        # Binding F5 application-wide to run lint
        self.root.bind('<F5>', self.run_lint)

        # Message ListBox
        rightscrollbar = Scrollbar(msg_frame)
        rightscrollbar.pack(side=RIGHT, fill=Y)
        bottomscrollbar = Scrollbar(msg_frame, orient=HORIZONTAL)
        bottomscrollbar.pack(side=BOTTOM, fill=X)
        self.lb_messages = Listbox(
            msg_frame,
            yscrollcommand=rightscrollbar.set,
            xscrollcommand=bottomscrollbar.set,
            bg="white")
        self.lb_messages.bind("<Double-Button-1>", self.show_sourcefile)
        self.lb_messages.pack(expand=True, fill=BOTH)
        rightscrollbar.config(command=self.lb_messages.yview)
        bottomscrollbar.config(command=self.lb_messages.xview)

        # History ListBoxes
        rightscrollbar2 = Scrollbar(history_frame)
        rightscrollbar2.pack(side=RIGHT, fill=Y)
        bottomscrollbar2 = Scrollbar(history_frame, orient=HORIZONTAL)
        bottomscrollbar2.pack(side=BOTTOM, fill=X)
        self.showhistory = Listbox(
            history_frame,
            yscrollcommand=rightscrollbar2.set,
            xscrollcommand=bottomscrollbar2.set,
            bg="white")
        self.showhistory.pack(expand=True, fill=BOTH)
        rightscrollbar2.config(command=self.showhistory.yview)
        bottomscrollbar2.config(command=self.showhistory.xview)
        self.showhistory.bind('<Double-Button-1>', self.select_recent_file)
        self.set_history_window()

        # status bar
        self.status = Label(self.root, text="", bd=1, relief=SUNKEN, anchor=W)
        self.status.pack(side=BOTTOM, fill=X)

        # rating labels
        lbl_rating_label = Label(rating_frame, text='Rating:')
        lbl_rating_label.pack(side=LEFT)
        lbl_rating = Label(rating_frame, textvariable=self.rating)
        lbl_rating.pack(side=LEFT)
        Label(mid_frame, text='Recently Used:').pack(side=LEFT)
        Label(top_frame, text='Module or package').pack(side=LEFT)

        # file textbox
        self.txt_module = Entry(top_frame, background='white')
        self.txt_module.bind('<Return>', self.run_lint)
        self.txt_module.pack(side=LEFT, expand=True, fill=X)

        # results box
        rightscrollbar = Scrollbar(res_frame)
        rightscrollbar.pack(side=RIGHT, fill=Y)
        bottomscrollbar = Scrollbar(res_frame, orient=HORIZONTAL)
        bottomscrollbar.pack(side=BOTTOM, fill=X)
        self.results = Listbox(
            res_frame,
yscrollcommand=rightscrollbar.set, xscrollcommand=bottomscrollbar.set, bg="white", font="Courier") self.results.pack(expand=True, fill=BOTH, side=BOTTOM) rightscrollbar.config(command=self.results.yview) bottomscrollbar.config(command=self.results.xview) #buttons Button(top_frame, text='Open', command=self.file_open).pack(side=LEFT) Button(top_frame, text='Open Package', command=(lambda: self.file_open(package=True))).pack(side=LEFT) self.btnRun = Button(top_frame, text='Run', command=self.run_lint) self.btnRun.pack(side=LEFT) Button(btn_frame, text='Quit', command=self.quit).pack(side=BOTTOM) #radio buttons self.information_box = IntVar() self.convention_box = IntVar() self.refactor_box = IntVar() self.warning_box = IntVar() self.error_box = IntVar() self.fatal_box = IntVar() i = Checkbutton(check_frame, text="Information", fg=COLORS['(I)'], variable=self.information_box, command=self.refresh_msg_window) c = Checkbutton(check_frame, text="Convention", fg=COLORS['(C)'], variable=self.convention_box, command=self.refresh_msg_window) r = Checkbutton(check_frame, text="Refactor", fg=COLORS['(R)'], variable=self.refactor_box, command=self.refresh_msg_window) w = Checkbutton(check_frame, text="Warning", fg=COLORS['(W)'], variable=self.warning_box, command=self.refresh_msg_window) e = Checkbutton(check_frame, text="Error", fg=COLORS['(E)'], variable=self.error_box, command=self.refresh_msg_window) f = Checkbutton(check_frame, text="Fatal", fg=COLORS['(F)'], variable=self.fatal_box, command=self.refresh_msg_window) i.select() c.select() r.select() w.select() e.select() f.select() i.pack(side=LEFT) c.pack(side=LEFT) r.pack(side=LEFT) w.pack(side=LEFT) e.pack(side=LEFT) f.pack(side=LEFT) #check boxes self.box = StringVar() # XXX should be generated report = Radiobutton( radio_frame, text="Report", variable=self.box, value="Report", command=self.refresh_results_window) raw_met = Radiobutton( radio_frame, text="Raw metrics", variable=self.box, value="Raw metrics", command=self.refresh_results_window) dup = Radiobutton( radio_frame, text="Duplication", variable=self.box, value="Duplication", command=self.refresh_results_window) ext = Radiobutton( radio_frame, text="External dependencies", variable=self.box, value="External dependencies", command=self.refresh_results_window) stat = Radiobutton( radio_frame, text="Statistics by type", variable=self.box, value="Statistics by type", command=self.refresh_results_window) msg_cat = Radiobutton( radio_frame, text="Messages by category", variable=self.box, value="Messages by category", command=self.refresh_results_window) msg = Radiobutton( radio_frame, text="Messages", variable=self.box, value="Messages", command=self.refresh_results_window) source_file = Radiobutton( radio_frame, text="Source File", variable=self.box, value="Source File", command=self.refresh_results_window) report.select() report.grid(column=0, row=0, sticky=W) raw_met.grid(column=1, row=0, sticky=W) dup.grid(column=2, row=0, sticky=W) msg.grid(column=3, row=0, sticky=W) stat.grid(column=0, row=1, sticky=W) msg_cat.grid(column=1, row=1, sticky=W) ext.grid(column=2, row=1, sticky=W) source_file.grid(column=3, row=1, sticky=W) #dictionary for check boxes and associated error term self.msg_type_dict = { 'I': lambda: self.information_box.get() == 1, 'C': lambda: self.convention_box.get() == 1, 'R': lambda: self.refactor_box.get() == 1, 'E': lambda: self.error_box.get() == 1, 'W': lambda: self.warning_box.get() == 1, 'F': lambda: self.fatal_box.get() == 1 } self.txt_module.focus_set() def 
select_recent_file(self, event): # pylint: disable=unused-argument """adds the selected file in the history listbox to the Module box""" if not self.showhistory.size(): return selected = self.showhistory.curselection() item = self.showhistory.get(selected) #update module self.txt_module.delete(0, END) self.txt_module.insert(0, item) def refresh_msg_window(self): """refresh the message window with current output""" #clear the window self.lb_messages.delete(0, END) self.visible_msgs = [] for msg in self.msgs: if self.msg_type_dict.get(msg.C)(): self.visible_msgs.append(msg) msg_str = convert_to_string(msg) self.lb_messages.insert(END, msg_str) fg_color = COLORS.get(msg_str[:3], 'black') self.lb_messages.itemconfigure(END, fg=fg_color) def refresh_results_window(self): """refresh the results window with current output""" #clear the window self.results.delete(0, END) try: for res in self.tabs[self.box.get()]: self.results.insert(END, res) except KeyError: pass def process_incoming(self): """process the incoming messages from running pylint""" while self.msg_queue.qsize(): try: msg = self.msg_queue.get(0) if msg == "DONE": self.report_stream.output_contents() return False #adding message to list of msgs self.msgs.append(msg) #displaying msg if message type is selected in check box if self.msg_type_dict.get(msg.C)(): self.visible_msgs.append(msg) msg_str = convert_to_string(msg) self.lb_messages.insert(END, msg_str) fg_color = COLORS.get(msg_str[:3], 'black') self.lb_messages.itemconfigure(END, fg=fg_color) except six.moves.queue.Empty: pass return True def periodic_call(self): """determine when to unlock the run button""" if self.process_incoming(): self.root.after(100, self.periodic_call) else: #enabling button so it can be run again self.btnRun.config(state=NORMAL) def mainloop(self): """launch the mainloop of the application""" self.root.mainloop() def quit(self, _=None): """quit the application""" self.root.quit() def halt(self): # pylint: disable=no-self-use """program halt placeholder""" return def file_open(self, package=False, _=None): """launch a file browser""" if not package: filename = askopenfilename(parent=self.root, filetypes=[('pythonfiles', '*.py'), ('allfiles', '*')], title='Select Module') else: filename = askdirectory(title="Select A Folder", mustexist=1) if filename == (): return self.txt_module.delete(0, END) self.txt_module.insert(0, filename) def update_filenames(self): """update the list of recent filenames""" filename = self.txt_module.get() if not filename: filename = os.getcwd() if filename+'\n' in self.filenames: index = self.filenames.index(filename+'\n') self.filenames.pop(index) #ensure only 10 most recent are stored if len(self.filenames) == 10: self.filenames.pop() self.filenames.insert(0, filename+'\n') def set_history_window(self): """update the history window with info from the history file""" #clear the window self.showhistory.delete(0, END) # keep the last 10 most recent files try: view_history = open(HOME+HISTORY, 'r') for hist in view_history.readlines(): if not hist in self.filenames: self.filenames.append(hist) self.showhistory.insert(END, hist.split('\n')[0]) view_history.close() except IOError: # do nothing since history file will be created later return def run_lint(self, _=None): """launches pylint""" self.update_filenames() self.root.configure(cursor='watch') self.reporter = GUIReporter(self, output=self.report_stream) module = self.txt_module.get() if not module: module = os.getcwd() #cleaning up msgs and windows self.msgs = [] self.visible_msgs 
= []
        self.lb_messages.delete(0, END)
        self.tabs = {}
        self.results.delete(0, END)
        self.btnRun.config(state=DISABLED)

        # setting up a worker thread to run pylint
        worker = Thread(target=lint_thread, args=(module, self.reporter, self,))
        self.periodic_call()
        worker.start()

        # Overwrite the .pylint-gui-history file with all the new recently added files
        # in order from filenames but only save last 10 files
        write_history = open(HOME+HISTORY, 'w')
        write_history.writelines(self.filenames)
        write_history.close()
        self.set_history_window()

        self.root.configure(cursor='')

    def show_sourcefile(self, event=None):  # pylint: disable=unused-argument
        selected = self.lb_messages.curselection()
        if not selected:
            return
        msg = self.visible_msgs[int(selected[0])]
        scroll = msg.line - 3
        if scroll < 0:
            scroll = 0
        # read the source with an explicit close instead of leaking the handle
        with open(msg.path, "r") as src:
            self.tabs["Source File"] = src.readlines()
        self.box.set("Source File")
        self.refresh_results_window()
        self.results.yview(scroll)
        self.results.select_set(msg.line - 1)


def lint_thread(module, reporter, gui):
    """thread for pylint"""
    # NB: this assigns a plain Python attribute on the Label and never reaches
    # the widget (and Tk must not be touched from a worker thread anyway); a
    # real status update would have to travel through gui.msg_queue
    gui.status.text = "processing module(s)"
    pylint.lint.Run(args=[module], reporter=reporter, exit=False)
    gui.msg_queue.put("DONE")


def Run(args):
    """launch pylint gui from args"""
    if args:
        print('USAGE: pylint-gui\n launch a simple pylint gui using Tk')
        sys.exit(1)
    gui = LintGui()
    gui.mainloop()
    sys.exit(0)

if __name__ == '__main__':
    Run(sys.argv[1:])
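# The thread-safety idiom LintGui relies on, reduced to a standalone sketch:
# the worker thread touches only the Queue, and the Tk main loop polls it via
# after(), so no Tk call ever happens off the main thread. All names below
# are illustrative, not part of the file above; call _demo() to try it.
import queue
import threading
import time
import tkinter as tk

def _demo():
    root = tk.Tk()
    status = tk.Label(root, text="working...")
    status.pack()
    msg_queue = queue.Queue()

    def worker():
        # background thread: only puts into the queue
        for i in range(3):
            time.sleep(0.5)
            msg_queue.put("message %d" % i)
        msg_queue.put("DONE")  # sentinel, as in LintGui.process_incoming

    def poll():
        # main thread: drain the queue, update widgets, then re-arm
        while msg_queue.qsize():
            msg = msg_queue.get(0)
            if msg == "DONE":
                root.destroy()
                return
            status.config(text=msg)
        root.after(100, poll)  # mirrors LintGui.periodic_call

    threading.Thread(target=worker, daemon=True).start()
    root.after(100, poll)
    root.mainloop()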
gdrive.py
#!/usr/bin/env python3 """A script to interact with your Google Drive files using the terminal""" import os import io import pickle import mimetypes import threading import time from argparse import ArgumentParser from os.path import expanduser, join from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from googleapiclient.discovery import build from googleapiclient.http import MediaIoBaseDownload from googleapiclient.http import MediaFileUpload from googleapiclient.errors import HttpError class GoogleCredentials(): """Handles the local Credentials file for the Google Drive API""" HOME = expanduser("~") GDRIVE_PATH = join(HOME, '.gdrive') CREDENTIALS_PATH = join(GDRIVE_PATH, 'credentials.json') TOKEN_PATH = join(GDRIVE_PATH, 'token.pickle') SCOPES = [ 'https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/drive.metadata' ] def __init__(self): self.__creds = None def build(self): self.load_credentials() if not self.is_credentials_valid(): self.prepare_credentials() return self.__creds def prepare_credentials(self): if self.__creds and self.is_credentials_outdated(): self.__creds.refresh(Request()) else: self.create_credentials() self.save_credentials() def load_credentials(self): self.__creds = None if os.path.exists(self.TOKEN_PATH): with open(self.TOKEN_PATH, 'rb') as token: self.__creds = pickle.load(token) def is_credentials_valid(self): return self.__creds is not None and self.__creds.valid def is_credentials_outdated(self): return self.__creds.expired and self.__creds.refresh_token def create_credentials(self): if os.path.exists(self.CREDENTIALS_PATH): flow = InstalledAppFlow.from_client_secrets_file(self.CREDENTIALS_PATH, self.SCOPES) self.__creds = flow.run_local_server(port=0) else: print('Credentials not found at %s' % self.CREDENTIALS_PATH) exit() def save_credentials(self): with open(self.TOKEN_PATH, 'wb') as token: pickle.dump(self.__creds, token) class GoogleService(): """Encapsulates Google Drive API, provides usability methods and keeps API-side configurations""" UPLOAD_CHUNK_SIZE = 1024 * 1024 * 128 # 128MB DOWNLOAD_CHUNK_SIZE = 1024 * 1024 * 128 # 128MB def __init__(self): creds = GoogleCredentials().build() self.__google = build('drive', 'v3', credentials=creds) def is_valid(self): return self.__google is not None def drive(self): # Google Drive resource is dynamically populated # pylint: disable=maybe-no-member return self.__google.files() def get_file_metadata(self, file_id): try: fields = 'id, name, size, modifiedTime, modifiedByMeTime, owners' return self.drive().get(fileId=file_id, fields=fields).execute() except HttpError: return None def search_filename(self, filename=None, page_size=1, page_token=''): query = '' if filename is not None: # Could also be name = '%s' for an exact search query = "name contains '%s'" % filename fields = ('nextPageToken, files/id, files/name, files/size, ' 'files/modifiedTime, files/modifiedByMeTime, files/owners') search = self.drive().list( q=query, orderBy='modifiedByMeTime desc', fields=fields, pageSize=page_size, pageToken=page_token ).execute() files_found = search.get('files', []) if files_found and page_size == 1: return files_found[0], None next_page = search.get('nextPageToken', None) return files_found, next_page def get_last_modified_file(self): file, _ = self.search_filename(None) return file def download(self, file_id, file_name): request = self.drive().get_media(fileId=file_id) file_handler = io.FileIO(file_name, 'wb') return 
MediaIoBaseDownload(file_handler, request, chunksize=self.DOWNLOAD_CHUNK_SIZE) def upload(self, filepath, mime_type): filename = filepath.split('/')[-1] file_metadata = {'name': filename} print('Uploading the file: %s' % filename) media = MediaFileUpload( filepath, mimetype=mime_type, resumable=True, chunksize=self.UPLOAD_CHUNK_SIZE ) return self.drive().create( body=file_metadata, media_body=media, fields='id' ) class Command(): """Commands handle their own argument parser and use GoogleService to execute operations""" TYPE = None HELP = None def __init__(self, args): self.args = args self.google = GoogleService() @staticmethod def add_to_subparser(subparsers): """Configures ArgParser for the command""" pass def execute(self): pass def is_service_started(self): return self.google.is_valid() def conclude_operation_while_logging(self, operation, title): logger = ProgressLogger(title) try: result = None while not result: status, result = operation.next_chunk() if status: logger.send(status) logger.close() return result except Exception as err: print('An error happened while %s' % title) print(err) logger.close() class Download(Command): TYPE = "download" HELP = "Download a file from Google Drive through ID or Filename. Use -l to download the last modified file." @staticmethod def add_to_subparser(subparsers): parser = subparsers.add_parser( Download.TYPE, help=Download.HELP ) parser.set_defaults(command=Download) parser.add_argument( 'file', metavar='FILE', nargs='*', help="Name or ID of the file to Download. If not specified, will try as ID then as Name." ) name_or_id = parser.add_mutually_exclusive_group() name_or_id.add_argument( '--name', '-n', help='Will use FILE as a filename to search', action='store_true' ) name_or_id.add_argument( '--id', '-i', help='Will use FILE as a file ID to download', action='store_true' ) parser.add_argument( '--last', '-l', help="Downloads the last modified file", action='store_true' ) def execute(self): if self.args.last: return self.download_last_uploaded_file() if not self.args.file: print('Please inform the ID or name of the file you want to download. 
Exiting...')
            return

        self.file = " ".join(self.args.file)

        if self.args.name:
            return self.download_filename()
        if self.args.id:
            return self.download_id()
        return self.try_download_id_then_name()

    def download_last_uploaded_file(self):
        last_file = self.google.get_last_modified_file()
        if not last_file:
            print("Could not find any file")
            return
        self.download_from_metadata(last_file)

    def download_filename(self):
        file_found, _ = self.google.search_filename(self.file)
        if not file_found:
            print("Could not find any file that contains '%s' on the name" % self.file)
            return
        return self.download_from_metadata(file_found)

    def download_id(self):
        file_found = self.google.get_file_metadata(self.file)
        if not file_found:
            print("Could not find file with '%s' as ID" % self.file)
            return
        return self.download_from_metadata(file_found)

    def try_download_id_then_name(self):
        file_id_found = self.google.get_file_metadata(self.file)
        if file_id_found:
            return self.download_from_metadata(file_id_found)
        self.download_filename()

    def download_from_metadata(self, metadata):
        if metadata is None:
            print('Could not find file to download')
            return
        file_name = metadata["name"] or "Unknown filename"
        Utils.describe_file(metadata)
        self.download(metadata["id"], file_name)

    def download(self, file_id, file_name):
        try:
            downloader = self.google.download(file_id, file_name)
            print("Downloading 0%", end='\r')
            self.conclude_operation_while_logging(downloader, "Downloading")
            print("Downloaded 100% ")
        except Exception as err:
            print('An error happened while downloading the file.')
            print(err)


class Upload(Command):
    TYPE = "upload"
    HELP = "Upload a file to Google Drive. Accepts wildcards and can find the last modified file."

    @staticmethod
    def add_to_subparser(subparsers):
        parser = subparsers.add_parser(
            Upload.TYPE,
            help=Upload.HELP
        )
        parser.set_defaults(command=Upload)
        parser.add_argument(
            'file',
            metavar='FILE',
            nargs='+',
            help="Path or filename of the file to be uploaded (accepts wildcards)"
        )
        parser.add_argument(
            '--last', '-l',
            help="Uploads the last modified file if a wildcard is used",
            action='store_true'
        )

    def execute(self):
        self.set_filepath_for_first_arg()
        if (self.args.last and len(self.args.file) > 1):
            self.set_filepath_for_last_modified()
        self.set_mimetype()
        return self.upload()

    def set_filepath_for_first_arg(self):
        self.filepath = self.args.file[0]

    def set_filepath_for_last_modified(self):
        print('Found %s files' % len(self.args.file))
        last_modified = self.find_last_modified_file(self.args.file)
        self.filepath = last_modified

    def find_last_modified_file(self, files, last_modified_file=None, last_time=None):
        # TODO: Find a way to improve this
        try:
            if not files:
                return last_modified_file
            file = files.pop()
            time = os.stat(file).st_mtime
            if not last_time or time > last_time:
                last_time = time
                last_modified_file = file
            return self.find_last_modified_file(files, last_modified_file, last_time)
        except Exception as err:
            self.filepath = None
            print('An error occurred while handling the wildcard files.')
            print(err)

    def set_mimetype(self):
        # mimetypes.guess_type() returns a (type, encoding) tuple and never
        # None, so the useful check is on the type slot; also default the
        # attribute so upload() cannot hit an AttributeError on early return
        self.mime_type = None
        if not self.filepath:
            return
        guessed_type = mimetypes.guess_type(self.filepath, True)
        if guessed_type[0] is None:
            print("Wasn't able to guess mimetype")
            return
        self.mime_type = guessed_type[0]

    def upload(self):
        try:
            uploader = self.google.upload(self.filepath, self.mime_type)
            print("Uploading 0%", end='\r')
            response = self.conclude_operation_while_logging(uploader, "Uploading")
            if not response:
                return
            print("Uploaded 100% ")
            print('File ID: %s\n' % response.get('id'))
        except Exception as err:
            print('An error occurred while
uploading the file.') print(err) class List(Command): TYPE = "list" HELP = "Browse all files or search on Google Drive." FILES_PER_PAGE = 5 @staticmethod def add_to_subparser(subparsers): parser = subparsers.add_parser( List.TYPE, help=List.HELP ) parser.set_defaults(command=List) parser.add_argument( 'file', metavar='FILE', nargs='?', help="Name of the file to be searched (leave blank to see all files)" ) def execute(self): self.list_files() def list_files(self): files_found, next_page = self.search() self.describe_files(files_found) while next_page is not None: try: if self.input_stop_next_page(): break files_found, next_page = self.search(next_page) self.describe_files(files_found) except KeyboardInterrupt: return def search(self, next_page=None): search_query = self.args.file files_found, new_next_page = self.google.search_filename( search_query, self.FILES_PER_PAGE, next_page ) return files_found, new_next_page def describe_files(self, files): for file in files: Utils.describe_file(file) def input_stop_next_page(self): # When Enter is pressed, the result is blank, thus false return input(">>> Press Enter for the next results\n") class CommandParser(): """Starts the Commands' parsers and executes a command if the arguments are valid""" COMMANDS = [Download, Upload, List] NAME = "gdrive" DESCRIPTION = "A script to interact with your Google Drive files" def __init__(self): self.parser = ArgumentParser( prog=self.NAME, description=self.DESCRIPTION ) subparsers = self.parser.add_subparsers() for command in self.COMMANDS: command.add_to_subparser(subparsers) self.args = self.parser.parse_args() def execute_command(self): if not self.is_command_valid(): self.parser.print_help() return command = self.build_command() if not command.is_service_started(): print("Google Drive Service failed to start") return command.execute() def is_command_valid(self): try: return self.args and self.args.command except AttributeError: return False def build_command(self): return self.args.command(self.args) class ConflatedChannel: """A list of size 0 or 1. When adding to a list of size=1, substitute the value""" def __init__(self): """Initialize channel""" self.__channel = [] self.__open = True def __str__(self): """String representation""" return str(self.__channel) def send(self, value): """Adds to the channel. If full, replace value. If channel is closed, raise a ValueError. """ if not self.is_open(): raise ValueError("Channel is closed.") self.__channel.clear() self.__channel.append(value) def pop(self): """Returns the value in the channel, or None""" try: return self.__channel.pop() except: return None def is_open(self): """Checks if the channel is open for send()""" return self.__open def close(self): """Close the channel. 
It will not receive new values."""
        self.__open = False


class ProgressLogger():
    """Logs progress from a worker thread."""

    def __init__(self, operation):
        self.__channel = ConflatedChannel()
        self.__operation = operation
        self.__start_time = time.time()
        self.__worker = threading.Thread(target=self.__work, daemon=True)
        self.__worker.start()

    def send(self, status):
        """Try to send a new value to the channel."""
        self.__channel.send(status)

    def close(self):
        """Close the channel and join the thread."""
        self.__channel.close()
        self.__worker.join()

    def __work(self):
        """Print logs while the channel is open and receiving values."""
        last_size = 0.0
        last_time = self.__start_time
        while self.__channel.is_open():
            status = self.__channel.pop()
            if status:
                new_time = time.time()
                self.__log_progress(status, last_size, last_time)
                last_time = new_time
                last_size = status.resumable_progress
            # Limits logs to 1 per second at most
            time.sleep(1)

    def __log_progress(self, status, previous_size, previous_time):
        """Print log from status."""
        progress = int(status.progress() * 100)
        total_size = Utils.size_to_human_readable(status.total_size)
        current_size = status.resumable_progress
        size = Utils.size_to_human_readable(current_size)
        current_time = time.time()
        timer = time.strftime("%H:%M:%S", time.gmtime(current_time - self.__start_time))
        diff_size = current_size - previous_size
        diff_time = current_time - previous_time
        bytes_per_second = diff_size / diff_time
        speed = Utils.size_to_human_readable(bytes_per_second)
        # ETA is the remaining bytes divided by throughput (bytes/s); guard
        # against stalled chunks so a zero delta cannot divide by zero
        if bytes_per_second > 0:
            estimated_time = (status.total_size - current_size) / bytes_per_second
        else:
            estimated_time = 0
        eta = time.strftime("%H:%M:%S", time.gmtime(estimated_time))
        print("%s %d%% - %s/%s - %s/s - %s - ETA: %s " %
              (self.__operation, progress, size, total_size, speed, timer, eta), end='\r')


class Utils:
    @staticmethod
    def describe_file(metadata):
        """Print file metadata"""
        print("Name: %s" % metadata["name"])
        print("Owner: %s" % metadata["owners"][0]["displayName"])
        if 'size' in metadata:
            print("Size: %s" % Utils.size_to_human_readable(float(metadata["size"])))
        if 'modifiedByMeTime' in metadata:
            print("Modified by Me: %s" % metadata["modifiedByMeTime"])
        else:
            print("Modified: %s" % metadata["modifiedTime"])
        print("ID: %s\n" % metadata["id"])

    @staticmethod
    def size_to_human_readable(num, suffix='B'):
        """Converts bytes to a more human-readable metric"""
        for unit in ['', 'K', 'M', 'G', 'T']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        # fall through for petabyte-scale values instead of returning None
        return "%3.1f%s%s" % (num, 'P', suffix)


def main():
    CommandParser().execute_command()


if __name__ == '__main__':
    main()
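# ConflatedChannel in isolation: a mailbox of size 0 or 1 where send()
# replaces any unread value, so a slow consumer (the once-per-second logger
# above) only ever sees the freshest progress status. A behavioral sketch,
# illustrative only; call _conflated_channel_demo() to check the invariants.
def _conflated_channel_demo():
    ch = ConflatedChannel()
    ch.send(1)
    ch.send(2)                 # replaces 1; the channel never grows past one item
    assert ch.pop() == 2
    assert ch.pop() is None    # empty again; pop() never blocks
    ch.close()
    try:
        ch.send(3)             # sends after close() are rejected
    except ValueError:
        pass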
imageme.py
#coding:utf-8 #!/usr/bin/python """ imageMe is a super simple image gallery server. Run imageme.py from the top level of an image directory to generate gallery index HTML and run a SimpleHTTPServer on the localhost. Imported as a module, use imageme.serve_dir(your_path) to do the same for any directory programmatically. When run as entry point, imageme.serve_dir('.') is what's called. """ # Dependencies from multiprocessing import Process import base64, io, os, re, sys, threading from http.server import BaseHTTPRequestHandler, HTTPServer, SimpleHTTPRequestHandler import socketserver from socketserver import ThreadingTCPServer, BaseRequestHandler, TCPServer import argparse # Attempt to import PIL - if it doesn't exist we won't be able to make use of # some performance enhancing goodness, but imageMe will still work fine PIL_ENABLED = False try: print('Attempting to import from PIL...') from PIL import Image PIL_ENABLED = False print('Success! Enjoy your supercharged imageMe.') except ImportError: print( 'WARNING: \'PIL\' module not found, so you won\'t get all the ' +\ 'performance you could out of imageMe. Install Pillow (' +\ 'https://github.com/python-pillow/Pillow) to enable support.' ) ## Base64 data for an image notifying user of an unsupported image type UNSUPPORTED_IMAGE_TYPE_DATA = 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAMgAyADASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD36iiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAK
KKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAoorn/+E68If9DXof8A4MYv/iqAOgopM1n6lrukaKYv7V1axsPNz5f2q5SLfjGcbiM4yPzoA0aKytP8SaFrFw1vpet6bfTqhdo7W7jkYKMDJCknGSOfetUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRSc0A0ALRRRQAUUUUAFFFFABRRXg/xL+M/iLwv4z1DQNMtdPWK28vE0qM7ndGr/3gB970oA94orkfhhrmoeJPh3perarMJr2487zJAgTO2Z1HAAA4AFddQAUVieL/ABCnhXwnqOtvF5v2SLcsecbmJCqCewLEV89eF/jv4qh8Q2/9t3MN7pssoSWPyEjaNWONyFQDkdcHOenGc0AfUFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRSUDpzQAtFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABXwDX39XwDQB9/8Aavn/APaa6+F/+3v/ANo19Adq+f8A9prr4X/7e/8A2jQBz/7OP/JQ7/8A7BUn/o2Kvp+vmD9nH/kod/8A9gqT/wBGxV6D+0b/AMk9sP8AsKx/+ipaAPSrrxHoVgxW81rTrdh2mukT+Zptl4n8P6lKIrHXdMupD0SC7jcn8Aa+LvD3hrWPFeoPYaJZtd3SRmVow6rhAQCcsQOrD86n8R+DfEXhJo11zS5rMS/cclXRj6BlJGfbNAH3BRXgv7Pvja6u5LnwrqE7TLFF59m0jZKqCA0Y9uQQO3P4dJ8dvGF54a8LWtjpszwXWpyMhmQ4ZYlA3bT2J3KM+hNAHol/4h0XSn2ajrGn2b/3bi5SM/8
AjxFOsdc0jU2xp+qWV2fS3uFk/kTXx34M8A6348urmDSBbqtsqtNLcSbUXdnaOASScHoO1dTL8A/GVrfW0bw2lzbvKiyy2twDsUkAthwp4HoKAPquiha+AaAPv6il718AUAff1VbzUbKwXdeXlvbL6zSqg/UirVfAVAH3DH4z8LSyCOPxLo7yHoq30RP/AKFW2DkA5B96+Ob34R+PLC0a5n8OTmNBk+VLHK2P91GJP5VneDPG+r+CdXS806ZjCzD7Ras37uZfQjsfRuo/SgD7YqG5u7azj8y6uIoE/vSuFH606CaO5gjnhkWSKRQ6OpyGBGQR+Br4H4oA+3x408KmTZ/wk2jbz/D9viz+W6tuN0kRXRw6MMhlOQR618ey/B3x/FbmdvDspQDOEniZvwUOT+lc74d8R6t4V1dNQ0m7e3nQ4YfwyD+669x9f0oA+5qKoaJq1rr2iWeq2T7re6iEqZ6gHsfccj8K8H+OPxLuzqUvhPR7loYIRi/mjbBkYjPlgjoAMZ9Tx2OQD3K+8TaBpc3k3+uaZaS/3Li7jjb8iRV+1u7a9gWe0uIriFvuyROHU/iK+LPD3gDxT4qtnudG0ea5t1JHmlljQkdQC5AJ+lZsU2r+GNcDRtc6dqdnJ3BR42HYj+h6igD7rpCQoJJAA6k9qwfBnie28YeFbLWrZdhmUrLHnJjkXhl/Pp7EGvmH42MT8XNbBJIAgAB7fuIzQB9RTeMfC9u+yfxJpEbZxte+iU/+hVpWl/Z6hF5tldwXMf8AehkDj8wa+N9D+GfjDxJpcWp6Tor3FnLu2S+fEgbBKnG5geoI/CvY/gL4L1fw5qGu3et6bPZTFIoIfNTG4ZLPg9xwnSgDxLx9LJL8QvEfmOz7dTuVXcc4AlbAHtX154E/5J54a/7BVr/6KWvkDx3/AMlD8S/9hW6/9GtUcHgvxVdW8Vxb+GtZmglQPHJHYSsrqRkEELggjnNAH3FRXlnwD0nUtH8CX1tqmn3djO2pSOsV1C0TFfKiGQGAOMgjPsa8q+MXxLu/Eeu3GiabctHoto5jYRtgXLg8s3qoPQdOM9+AD6QufFnhuyuDBdeINJgmBwY5b2NGB+hatWKWKeJZYZFkjYZV0YEEexFfGemfDDxprOlrqVjoFxJaOu5WZkQsvqqsQSPTAOayPDviHU/Cmtw6lplw8FxE2GXnbIueUYdwfT+tAH3NQeOar2V5BqFhb3tq/mQXESyxOP4lYAg/ka+T/i18Qbnxf4kntLedl0azkKW8SH5ZWHBkOOue3oPqcgH063jHwxHP5D+JNIWbOPLN9EGz9N2a+W/jWQ3xc1xlIIItyCO/+jx1m6N8MvGev6et/pugzy2rjckjukQceq72BYe4rndR0680m/lsdQtZbW7hOJIZkKsvGRkH1BBHsQaAPrT4Jf8AJINC/wC3j/0okrv64D4Jf8kg0L/t4/8ASiSu/oAp6tplprWk3Wm38QltLmMxypnGQffsfftXk/hr9n7SdF1+DUr/AFebUY7eQSRW3kCJSwORvO47hntxnvxwfRfHX/JPPEv/AGCrr/0U1fIHgT/kofhr/sK2v/o1aAPt6qd7qunacM32oWtr3/fzKn8yKy/HY/4t94l/7BV1/wCimr4qsrOfUb63sbWMyXNxKsUUYIy7scKPzIH40Afbdv4v8M3cohtvEWkzSngJHexsxPsA2a2WZUQsxAUDJJPAFfFPiH4feKvCtot3rWjS21szBfNDpIoJ6AlGIH41jwLqWryWmmW5uryQEx21spZ8ZOSEXtnqcUAfaieMfDEk/kp4k0h5c42LfRFvyzW3XxP4g+H/AIq8LWaXms6PNbWzEL5odJFBPQEoSB+Ndr8CfGN1pPi6LQJZmbTtSyqxseI5gCVYemcbT65HpQB9R1n3uu6PprFb/VbG0I6ie4WM/qRWhnvXxnYfCnx1qUYkt/Dd2qnp55WE/k5FAH1xY+JdB1SXytP1vTruQ9Et7qOQ/kCa1K+Gtf8AC2t+FrpLfW9Oms5JASm/BVwOuGBIOMjoa99/Z/8AGV3rOlXvh+/naaTT1WS2dzlvKPBXPopxj2bHQCgD2iqt7qNlpsXm395b2sX9+eVY1/MkVxHxa8ev4G8Mo1kV/tW+JjttwzsAHzSY74yPxI+lfKenaZqviPVBbafa3F/eyksVRS7H1J/xNAH2nB4w8M3LbbfxHpErekd9Gx/Rq2QwZQVIIIyCK+KfEfw+8UeErCO81zSmtLaSQRI/nRuC5BIHysccA/lXc/s5f8lCv/8AsFSf+jYqAPp6q19qNjpsHnX97b2kX9+eVUX8zXK/E3xuvgbwo97EEe/uG8m0jbpvI5Y+yjn64HevkrTdJ1rxXq7QWFvc6lfzEyOQSzHnlmY9Oe5NAH2vp/iDRdWcppur2F6w5ItrlJD/AOOk1o18O+IfCWveE544tc0yazMmfLZiGV/XDKSCfxr6B+BfxAuvEmnXGg6rcNNf2KCSGZ2y8sOcfNnqVJAz3DDvzQB7DRRRQB8SePpZJPiF4j3yM+3VLlV3EnAErYFfXngX/knnhn/sFWv/AKKWvkDx3/yUPxL/ANhW6/8ARrVHB4M8VXVvFcW/hrWZoJUDxyR2ErK6kZBBC4II5zQB9xUV5Z8A9J1HRvAt7b6pp93YztqcjrHdQtExXyohkBgDjIIz7GvmrxNosnh3xPqWkS5LWlw0YYjllz8rfiMH8aAPuiisDwRrq+JPBWkatuDPPbL5p/6aL8r/APjwNfM3xu1n+2PidfIjborBEtE+qjLf+PMw/CgD64qreajZWC7ry8t7ZfWaVUH6kV5x8BNDOlfDpLyRNs2pTtcHIwdg+RR9PlJ/4FXynQB9wx+M/C0sgjj8S6O8h6Kt9ET/AOhVtg5AOQfevjm9+EfjywtGuZ/Dk5jQZPlSxytj/dRiT+VZ3gzxvq/gnV0vNOmYwsw+0WrN+7mX0I7H0bqP0oA+2KKjhmjuLeOeGQSRSKHR1OQykZBH4Gvlz4xfEu78R63c6Hp1yY9FtJDGwjOPtLrwWYjqoPQdOM88YAPo658WeG7KcwXXiHSoJgcGOW9jRvyJzWrDNFcRLLDIkkbDKujZB/GvjTTPhj401jS11Kx0C4ktXXejs6IXX1VWYEg9sA5rI8PeItU8J65DqWmXDwXETYZedrrnlGHcH0/rQB9zUVi6rqUtz4JvdT0VjLLJp0lxZMgzvYxlkx+OK+II5ZIpUlid0kQhlZSQVI5yDQB980UfjRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABXwDX39XwfrGlXOiaxeaZeIUuLWVonHuD1Hsev40AfeAYMoIOQRkV4B+0yQT4XAPOLo/8AomtfwL8cvDo8MWln4juZbO/tYlhaQwvKswUYDZUE5IHOQOc15D8UfHY8e+J1u7aKSGwto/Jtkk+8RnJZh2JPb0AoA6j9nEH/AIWFfnHH9lSf+jYq7/8AaO/5J5p//YVj/wDRU1Y37OPh6WG11bxBNGVjn22tuxGNwUkuR7Z2jPqD6Vs/tHf8k8sP+w
rH/wCipaAPP/2cf+Sh3/8A2CpP/RsVeufHGFJPhLq7sATE8Dr7HzkX+RNeR/s4/wDJQ7//ALBUn/o2KvYPjb/ySHXP+3f/ANHx0AeA/BFivxc0UA8MJwff9xIf6V7X8cPBd74q8K211pkLT32mSM4hUZaSNgN4X1Pyqcd8HvivE/gl/wAle0L/ALeP/SeSvprxX450PwWbE63PJCl47LG6Rl9u0DJIHOOR0B60AfGulazqWhXq3mlX09ncKMCSFypI9D6j2PFex+E/2h7+G4itvFFnFcW5IVru2XZIv+0V6N+GK9P1bXPhj4rsturar4eu4yvBnuI1kX/dJIZT9MV8k6nFaQareQ6fM09lHO628rjBeMMQrH3IwfxoA+8hXwDX3N4Sjmh8GaFFcA+emn26yZ67hGoP618OywyQTPDKpSRGKsrDlSOCDQB9918AV9Z6d8d/A11YRzXeoTWU5Ub7eS2kcqe43IpB+ua+TMUAff1fANff1fANAH38a+NPivp0GlfFDXrW3VVj88ShVGADIiuR+bGvoM/HbwF9jM/9pXBkxn7P9kk3n2zjb+tfLniHWrjxH4hv9YuVCy3kzSlAchQeij6DA/CgD60+D1xJdfCfQJJCSwikjGfRZXUfoBXxxX3F4M0VvDngzSNIfAltrZFlwcjeeWx/wImvh2gD79PWvlX4/adBY/Eoywqqm9so7iTaMfPlkz+SCvYbb47eA57TzpNRubaTGfIltJC+fTKgr+tfOPjvxZJ418W3WstEYYn2xwRMclI1GAD7nkn3JoA+gP2ebmSf4byxuSVg1CWNM9htRv5sa+Yb65kvL65upiTLNK0jk9SScnP519d/B7QZfD/w102C4jKXFzuupFIwRvOVz77Qua+afiT4al8LeO9TsGTbA8hntjjhonJK4+nK/VTQB9jaZZQ6bpdpY26hYLeFIowOyqAB/KvAf2lLKCPVfD98qjzp4ZonI6lUKEf+htW/4C+Ofh9fDNrZeJLiWzv7WIRGbyXkSYKMBvlBIJA5BHWvIvih46HjvxSL23jkisLaPybZJPvYySWI7Ent6AUAeofs03MjWPiO1JPlxywSKPdhID+iLXm/xt/5K9rv/bv/AOiI69r+AnhyXRfAjX9zGUn1ObzlUjBEQGEz9fmYezCvFPjb/wAle13/ALd//REdAH0B8Ev+SQ6F/wBvH/o+Su/rgPgl/wAkh0L/ALeP/R8ld/QB8Q+O/wDkofiX/sK3X/o1q+v/AAJ/yTzw1/2CrX/0UtfJvxO0+TT/AImeIYpFwXvXnA9pDvB/Jq9n+Hnxo8LWXgzTtN128ksryxhW3/1DusioMKQUB7AZzjmgD2qvgIV9t+EfGuj+N7O6utHaZorabyWMsewscA5Az05746GvjTWtHutB1u80q9QpcWsrRv74PBHsRgj2NAHp/wDw0Z4uH/MN0Pn/AKYy/wDxyvNPEeu3HibxBd6zdQ28M90waRLdSEBAAyASTzjJ56k19H+Hfj/4Wv7CP+2jNpl4BiUeS0sZPqpQE49iPz61Pqfx+8F2cZNpJe379hDblB+JfbxQBs/BuZpvhNoLvkkRypz6LK4H6AV8eCvv32r4T13Rbvw9rt7pN6pWe0laNuCA2OjD2IwR9aAPusEYwMfSvkL42/8AJXtd/wC3f/0RHXsHhb4+eGr3SYR4hmk0/UUULMRA8kcjf3l2AkZ64I46V4d8T9csPEnxE1XV9LmM1lceV5cjIULbYkU8HB6qaAPpD4Jf8kg0L/t4/wDSiSu/rgPgn/ySHQx/18f+lEld/QBgeOv+SeeJf+wVdf8Aopq+QPAn/JQ/DX/YVtf/AEatfX/jr/knniX/ALBV1/6KavkDwJ/yUPw1/wBhW1/9GrQB9f8Ajv8A5J54l/7BV1/6KavkDwJ/yUPw1/2FbX/0atfX/jv/AJJ54l/7BV1/6KavkDwJ/wAlD8Nf9hW1/wDRq0AfXXxBgSf4c+JEkG5Rptw4B9VjYj9QK+R/ATlPiH4aI76pbD85VH9a+vfHf/JPPEv/AGCrr/0U1fIHgT/koXhr/sK2v/o1aAPrr4gQJP8ADrxIkiggaZcOB7rGWH6gV8jeA2K/EPw0Qcf8TS2H5yrX1946/wCSe+Jf+wVdf+imr5A8Cf8AJQvDX/YVtf8A0atAH2fresWXh/RrnVdRlEVpbJvkbGT7ADuScAD1NfNGs/tAeL76dzpotNMgz8ipEJXA92fIJ/AV6f8AtCxTSfDeJos7I9QiaXH93a45/wCBFa8S+FHivS/BvjUalq8TvbNbvCJETc0LEqd+PoCOOcMfpQBR8SfEXxP4u0mLT9cvkuoI5hMh+zxowYBl6qo4wxrtP2cSf+FhagM8HSpD/wCRYq1/jT8RfC/i3wdZ6foepG6uY79JnX7PJGAgjkXOWUd2FZH7OI/4uHqH/YKk/wDRsVAEn7SDSf8ACdaYpz5Y01Sv182TP9K7L9m6O2HhHVpV2/amv9snrsEa7fwyX/WtP44+BrrxV4et9R0yIzahppZvJUfNLE2NwHqRgED6454r5r0PxDq/hq/+26PfTWdxjaWjPDD0IPBHQ4IoA+iv2jv+Se6f/wBhWP8A9FS15/8As4/8lCv/APsFSf8Ao2KuC8R+N/EviyKNNb1aa7iRt6xYVEDYI3bVAGcE84zzXe/s4/8AJQr/AP7BUn/o2GgCx+0hcyN400q1JPlx6cJFHu0jg/ogrkPBHxP1nwFY3VrpVjpsouZRJJJcxuz8DAGVdeByenc16h+0b4dmns9L8QwR7o7fdbXBH8IYgoT7Z3D6ketcJ8J/ikvgOW5sNRgkn0q6cSExYLwyYxuAOMggAEZ7DHoQCv4s+Mmv+MfD82i6lYaSkEjK2+GGRXUqwOQTIQDxjp0Jo+BszRfFnSkUnEsc6Ng9vKc/zAr3V/jj4ASLcNakdv7i2c2f1UD9a3PBHjfTvHmm3WoabDPDDBcm3KzgB2wqtnAJ4+b17UAdRRRRQB8Q+O/+Sh+Jf+wrdf8Ao1q+v/Av/JPPDP8A2CrX/wBFLXyb8TrCTTviZ4hhlUqXvZJx7iQ7wfyavZ/h58aPC1n4M03TNcu5LG8sYFtzmB3WRUG1SCgP8IGc45zQB7Sa+e/2jPDJjutN8TQR/LKPslwQOjDLIT9RuH/ARXsfhHxro/jezurvR2maG2m8ljKmwk4ByBnpz3x0NXfE2hW/iXw3f6NdYEV3CU3YzsbqrfgQD+FAHiv7OfiRI4dY8P3MoVY8X0O44wOFk/8AZD+deESyXGo37yuWmubmUseOXdj/ADJNJDcXFlM7RSSQybXibHB2sCrKfqCQfrXpHwJ8N/238QYr6VN1tpSG4YkceYeIx9ckt/wCgD6osrSGwsbezt0CQW8axRr6KoAA/IV8EV9/V8A0Affxr40+K+nQaV8UNetbdVWPzxKFUYAMiK5H5sa+gz8dvAX2Mz/2lcGTGfs/2STefbONv618ueIdauPEfiG/1i5ULLeTNKUByFB6KPoMD8KAPrP4PXEl18J9AkkJLCKSMZ9FldR+gFfHY6E+9
fb/AIM0VvDng3SNIfAltrZVlxyN5GWx/wACJr4u1rSLvQdavNKvYylzaStG4PfHQj2IwQe4IoA9Q/4aN8Xf9A7Q/wDvxL/8crzbxHr1z4n8QXmtXcNvBcXbB5EtlKoCFAyASTzjJ56k19GeHvj/AOFr6xj/ALaM2mXgAEg8lpYye+0rk4+o/PrVjUvj94Ls0P2SW9v37LDblB+JfbQBsfBuZp/hLoLuSSElTn0WZwP0ArqYdA0W31E6jDpFhFfNyblLZBIf+BAZrRxXgP8Aw0x/1KP/AJUv/tVAHv2aWgDFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFch44+HOh+O7VBqEbQ3kS7YbuHAkQf3T2Zc9j+GK6+igD5ouv2cPEqSkWmsaTLH2aUyRnH0CsP1rd8P/s4RxXCTeIdZE0anLW9khXd9XbnH0X8RXvVFAEcEEVtBHBBGkcMahERBhVUDAAHYVynxH8Df8LA8PW+k/wBo/YPJu1ufN8jzc4R1243L/fznPauvooA8w+HHwf8A+Ff+IbjVv7d+3+baNbeV9k8rGXRt2d7f3MYx3rsPG/hn/hMfCF9oP2v7H9q8v9/5fmbdsiv93Iznbjr3rfooA8g8E/Az/hDvF9jr/wDwkf2z7L5n7j7F5e7dGyfe8w4xuz07Vs/Ez4Vj4gz2l0mrvZT2sRjRGiEiHJznqCD789OlejUUAfMj/s5eKw/yapopX1MkoP5COuv8Kfs86dp15FeeIdQGomMhhaxR7IiR/eJOWHtx75r2zFFABXmvj/4N6T41u21O3uG03VWGHmRNyS46b1yOfcEe+a9KooA+ZG/Zx8WB8DVNFKepllB/Ly61tP8A2a7kuDqXiOFFHVba2Lk/8CYjH5V9C0UAFfANff1fIP8AwpP4h/8AQv8A/k7b/wDxygDqLv8AZv8AEaTlbTWNKlhzw0xkjbH0CsP1rvfAfwN0vwtfQ6pqt0NT1CIh4V8vbFE3rg5LEdicY9M4NesZzS0AAr4Br7+zjtXyD/wpT4g/9C//AOTtv/8AF0AdZqH7N2uxzkabrenTw54NyrxN+Shv510/g79n2w0u9jvvEd6motGQy2kSEQ5H98nlh7YA9c9K9r60UAJisTxT4S0jxhpLadrFt5sed0bocSRN/eVux/Q9xW5RQB82X37N2vpOw07WtMnhzw1wJIm/IK3866bwn+zzYafdR3fiO/GoFCGFrApWIkf3ieWHtgV7bRQADivIPG3wM/4THxffa9/wkf2P7V5f7j7D5m3bGqfe8wZztz0716/RQBz/AIJ8M/8ACHeELHQftn2z7L5n7/yvL3bpGf7uTjG7HXtXQUUUAcV8QPhnpPj+1ia5ke11CBSsN3EoJA/usv8AEuecZBz0Iyc+NP8As4+KRKRHqujtHnhmklDflsP86+mqKAPPvhd8Npfh5bagJtVF7JfGMuqRbEj2bumSc/ePPHatjxn8P9C8c2ax6pCy3EQIhuoTtkj9s9CPYg/nXU0UAfNV3+zd4ijmIsta0uaLPDTeZEfyCt/Ormm/s2agzqdT8QWsS91toWkJ/FiuPyr6JooAK5bxp4A0Px1ZJDqkLLPECILqEgSR+2ehHsf0PNdTRQB81Xn7N/iFJiLHWtLmizw03mRHH0Ct/OprL9m3WJGH27X7GBe5gieX9Dtr6PxRQBieEPDUHhDwtZaFbzyXEVqHxLIAGbc7Oc492P4Vt0UUAZ+u6Z/bXh7U9J87yft1pLbebt3bN6Fd2MjOM5xkV5BoX7PP9ieIdM1b/hKPO+w3cVz5X9n7d+xw23PmHGcYzg17fRQBn69pn9t+HtT0nzvJ+3Wktt5u3ds3oV3YyM4znGRXkGhfs8/2J4h0zVv+Eo877DdxXPlf2ft37HDbc+YcZxjODXt9FAGfrumf234d1PSfO8n7day23m7d2zehXdjIzjOcZFeQaF+zz/YniHTNV/4SjzvsN3Fc+V/Z+3fscNtz5hxnGM4Ne30UAZ+u6Z/bXh7U9K87yfttpLbebt3bN6Fd2MjOM5xkV5BoX7PP9ieIdM1X/hKPO+w3cVz5X9n7d+xw23PmHGcYzg17fRQBBe2dtqNlNZXkKTW06GOWNxkMp4INeAaz+zdeC4ZtE16B4SfljvUKso9Cy5z9cCvoaigD5li/Zx8Ulh52q6Oi+qSSsf1QV6b8NvhCngDWJ9VfWWvZ5rY25RYPLVQWVs53En7nt1r02igDI8SeIdP8LaHNq2qO6WkTKrFF3HLMFGB365+grnZfFHwz8RKJr3UvDl3kcfbvK3flJz+lL8U/BeoeOvC8Wl6dewW0kdws588Ha+FYBcjJHLZ6HpXgFx8DPH0EhWPSYbhRxviu4gD/AN9MD+lAG78Z/GPhnUNNsfDfhWO0NtDcfabiS0iCRbgpVVXAAbhmJPTp15xe/Zt0uR9c1rVtp8qK2W2DY6s7BsD6bP1FZGjfs++Lb6Zf7Tls9Mgz8xaTzXx7BeD+LCvpDw/oGn+GdFt9J0yARWsC4A6lm7sx7knnP8qANCWJJomilRXjcFWVhkMDwQR3FeF+I/2cree4efw7q/2ZGORbXalwv0cc4+oJ9693ooA+Zof2cPFDOPP1bR0T1jeVz+RQfzr2H4bfDmP4eWF5Aupy30l2yM5MYjRSoP3Rknv3PYV3FFABRRRQBxXxA+GekePraNrl2tdRgUrDdxqCQOu1h/EueccY5weTnxp/2cfFXm4j1XR2jzwzSSg4+mw/zr6aooA8++F/w3l+HltfibVBeyXvll1SLYibN3TJOc7uvHQVtL8RPBsiO3/CTaYuwkMr3Ko2R/snB/Sunr5JufgZ4+t5THFpcNyvaSK7jA/8eYH9KAOZ8ca3aeJfGmq6vY232e3upt0aYwTwBuI9WxuPuTX1D8IvCjeE/ANrDPGUvrsm6uQRyrMBtU/RQvHrmvPvAXwCntNSh1PxXLA6QsHSwhO8OR08xsYx7DOfXtXvlABXwDX39XyD/wAKT+If/Qv/APk7b/8AxygDqLv9m/xGk5W01jSpYc8NMZI2x9ArD9a73wH8DdL8LX0OqardDU9QiIeFfL2xRN64OSxHYnGPTODXrGc0tACYrlvGnw+0LxzZLFqkLLcRAiG6hOJI/bPcexzXVUUAfNV3+zf4iSU/YtZ0qaPs03mRn8grfzq5p37NmoPIp1TxDbRJnkW0LSE/i23H5GvonFFADXdY0Z3IVVGSSeAK+CrW1lvbuG1t1LzTyLHGo/iZiAB+Zr7j8R6Odf8ADt/pIu5LQXkJhaaNQWVTwRg+oyPoa8v+H/wSk8I+MxrF/qNvfQ28bfZQkZRhIeNzA5AwM45PJ9qAPZOlFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUVBeXkGn2Vxe3UgitreNpZZG6KijJP5A0AT0V53B8avBt5rVlpdjdXd5PeXEdvG0duyqGdgoJ37eMn/61eiUAFFFFABRRRQAUUUUAFFFFABTfrinV8a/Fa7v7n4ma6NQZy8VyYolY
[... remainder of the base64-encoded JPEG payload elided ...]'  # tail of the inline image data URI constant


class BackgroundIndexFileGenerator:

    def __init__(self, dir_path):
        self.dir_path = dir_path
        self.process = Process(target=self._process, args=())
        self.process.daemon = True

    def _process(self):
        _create_index_files(self.dir_path)

    def run(self):
        self.process.start()


def _clean_up(paths):
    """
    Clean up after ourselves, removing created files.

    @param {[String]} paths - A list of file paths specifying the files we've
        created during run. Will all be deleted.

    @return {None}
    """
    print('Cleaning up')
    # Iterate over the given paths, unlinking them
    for path in paths:
        print('Removing %s' % path)
        os.unlink(path)


def _create_index_file(
        root_dir, location, image_files, dirs, force_no_processing=False):
    """
    Create an index file in the given location, supplying known lists of
    present image files and subdirectories.

    @param {String} root_dir - The root directory of the entire crawl. Used to
        ascertain whether the given location is the top level.
    @param {String} location - The current directory of the crawl. The index
        file will be created here.
    @param {[String]} image_files - A list of image file names in the location.
        These will be displayed in the index file's gallery.
    @param {[String]} dirs - The subdirectories of the location directory.
        These will be displayed as links further down the file structure.
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        actually process thumbnails, PIL images or anything. Simply index
        <img> tags with original file src attributes.

    @return {String} The full path (location plus filename) of the newly
        created index file. Intended for usage cleaning up created files.
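
    Example (illustrative - paths and file names are hypothetical):
        _create_index_file('.', './photos', ['a.jpg', 'b.png'], ['raw'])
        # -> './photos/imageme.html', now containing the gallery page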
""" # Put together HTML as a list of the lines we'll want to include # Issue #2 exists to do this better than HTML in-code header_text = \ 'imageMe: ' + location + ' [' + str(len(image_files)) + ' image(s)]' html = [ '<!DOCTYPE html>', '<html>', ' <head>', ' <title>imageMe</title>' ' <style>', ' html, body {margin: 0;padding: 0;}', ' .header {text-align: left; display: inline-block;}', ' .content {', ' padding: 3em;', ' padding-left: 4em;', ' padding-right: 4em;', ' }', ' .image {max-width: 100%; border-radius: 0.3em;}', ' td {width: ' + str(100.0 / IMAGES_PER_ROW) + '%;}', ' </style>', ' </head>', ' <body>', ' <div class="content">', ' <h2 class="header">' + header_text + '</h2>' ] # Populate the present subdirectories - this includes '..' unless we're at # the top level directories = [] if root_dir != location: directories = ['..'] directories += dirs if len(directories) > 0: html.append('<hr>') # For each subdirectory, include a link to its index file for directory in directories: link = directory + '/' + INDEX_FILE_NAME html += [ ' <h3 class="header">', ' <a href="' + link + '">' + directory + '</a>', ' </h3>' ] # Populate the image gallery table # Counter to cycle down through table rows table_row_count = 1 html += ['<hr>', '<table>'] # For each image file, potentially create a new <tr> and create a new <td> for image_file in image_files: if table_row_count == 1: html.append('<tr>') img_src = _get_thumbnail_src_from_file( location, image_file, force_no_processing ) link_target = _get_image_link_target_from_file( location, image_file, force_no_processing ) html += [ ' <td>', ' <a href="' + link_target + '">', ' <img class="image" src="' + img_src + '">', ' </a>', ' </td>' ] if table_row_count == IMAGES_PER_ROW: table_row_count = 0 html.append('</tr>') table_row_count += 1 html += ['</tr>', '</table>'] html += [ ' </div>', ' </body>', '</html>' ] # Actually create the file, now we've put together the HTML content index_file_path = _get_index_file_path(location) print('Creating index file %s' % index_file_path) index_file = open(index_file_path, 'w') index_file.write('\n'.join(html)) index_file.close() # Return the path for cleaning up later return index_file_path def _create_index_files(root_dir, force_no_processing=False): """ Crawl the root directory downwards, generating an index HTML file in each directory on the way down. @param {String} root_dir - The top level directory to crawl down from. In normal usage, this will be '.'. @param {Boolean=False} force_no_processing - If True, do not attempt to actually process thumbnails, PIL images or anything. Simply index <img> tags with original file src attributes. @return {[String]} Full file paths of all created files. """ # Initialise list of created file paths to build up as we make them created_files = [] # Walk the root dir downwards, creating index files as we go for here, dirs, files in os.walk(root_dir): print('Processing %s' % here) # Sort the subdirectories by name dirs = sorted(dirs) # Get image files - all files in the directory matching IMAGE_FILE_REGEX image_files = [f for f in files if re.match(IMAGE_FILE_REGEX, f)] # Sort the image files by name image_files = sorted(image_files) # Create this directory's index file and add its name to the created # files list created_files.append( _create_index_file( root_dir, here, image_files, dirs, force_no_processing ) ) # Return the list of created files return created_files def _get_image_from_file(dir_path, image_file): """ Get an instance of PIL.Image from the given file. 
    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path

    @return {PIL.Image} An instance of the image file as a PIL Image, or None
        if the functionality is not available. This could be because PIL is
        not present, or because it can't process the given file type.
    """
    # Save ourselves the effort if PIL is not present, and return None now
    if not PIL_ENABLED:
        return None
    # Put together full path
    path = os.path.join(dir_path, image_file)
    # Try to read the image
    img = None
    try:
        img = Image.open(path)
    except IOError as exptn:
        print('Error loading image file %s: %s' % (path, exptn))
    # Return image or None
    return img


def _get_image_link_target_from_file(dir_path, image_file, force_no_processing=False):
    """
    Get the value to be used as the href for links from thumbnail images. For
    most image formats this will simply be the image file name itself. However,
    some image formats (tif) are not natively displayable by many browsers and
    therefore we must link to image data in another format.

    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        actually process a thumbnail, PIL image or anything. Simply return the
        image filename as src.

    @return {String} The href to use.
    """
    # If we've specified to force no processing, just return the image filename
    if force_no_processing:
        return image_file
    # First try to get an image
    img = _get_image_from_file(dir_path, image_file)
    # If format is directly displayable in-browser, just return the filename
    # Else, we need to return a full-sized chunk of displayable image data
    # (guard against img being None, e.g. when PIL is unavailable)
    if img is not None and img.format.lower() in ['tif', 'tiff']:
        return _get_image_src_from_file(
            dir_path, image_file, force_no_processing
        )
    return image_file


def _get_image_src_from_file(dir_path, image_file, force_no_processing=False):
    """
    Get base-64 encoded data as a string for the given image file's full image,
    for use directly in HTML <img> tags, or a path to the original if image
    scaling is not supported. This is a full-sized version of
    _get_thumbnail_src_from_file, for use in image formats which cannot be
    displayed directly in-browser, and therefore need processed versions even
    at full size.

    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        actually process a thumbnail, PIL image or anything. Simply return the
        image filename as src.

    @return {String} The base-64 encoded image data string, or path to the
        file itself if not supported.
    """
    # If we've specified to force no processing, just return the image filename
    if force_no_processing:
        if image_file.endswith('tif') or image_file.endswith('tiff'):
            return UNSUPPORTED_IMAGE_TYPE_DATA
        return image_file
    # First try to get an image
    img = _get_image_from_file(dir_path, image_file)
    return _get_src_from_image(img, image_file)


def _get_index_file_path(location):
    """
    Get the full file path to be used for an index file in the given location.
    Yields location plus the constant INDEX_FILE_NAME.

    @param {String} location - A directory location in which we want to create
        a new index file.

    @return {String} A file path for usage with a new index file.
    """
    return os.path.join(location, INDEX_FILE_NAME)


def _get_server_port():
    """
    Get the port specified for the server to run on.
    If given via the --port/-p command line argument, we'll use that. Else
    we'll default to 8000.

    @return {Integer} The port to run the server on. Default 8000, overridden
        by the command line argument.
    """
    # return int(sys.argv[1]) if len(sys.argv) >= 2 else 8000
    return args.port


def _get_src_from_image(img, fallback_image_file):
    """
    Get base-64 encoded data as a string for the given image. Fallback to
    return fallback_image_file if we cannot get the image data or img is None.

    @param {Image} img - The PIL Image to get src data for
    @param {String} fallback_image_file - The filename of the image file, to
        be used when image data capture fails

    @return {String} The base-64 encoded image data string, or path to the
        file itself if not supported.
    """
    # If the image is None, then we can't process, so we should return the
    # path to the file itself
    if img is None:
        return fallback_image_file
    # Target format should be the same as the original image format, unless
    # it's a TIF/TIFF, which can't be displayed by most browsers; we convert
    # these to jpeg
    target_format = img.format
    if target_format.lower() in ['tif', 'tiff']:
        target_format = 'JPEG'
    # If we have an actual Image, great - put together the base64 image string
    try:
        bytesio = io.BytesIO()
        img.save(bytesio, target_format)
        byte_value = bytesio.getvalue()
        # decode to str so the data URI is not rendered as "b'...'" on Python 3
        b64 = base64.b64encode(byte_value).decode('ascii')
        return 'data:image/%s;base64,%s' % (target_format.lower(), b64)
    except IOError as exptn:
        print('IOError while saving image bytes: %s' % exptn)
        return fallback_image_file


def _get_thumbnail_image_from_file(dir_path, image_file):
    """
    Get a PIL.Image from the given image file which has been scaled down to
    THUMBNAIL_WIDTH wide.

    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path

    @return {PIL.Image} An instance of the thumbnail as a PIL Image, or None
        if the functionality is not available. See _get_image_from_file for
        details.
    """
    # Get image
    img = _get_image_from_file(dir_path, image_file)
    # If it's not supported, exit now
    if img is None:
        return None
    if img.format.lower() == 'gif':
        return None
    # Get image dimensions
    img_width, img_height = img.size
    # We need to perform a resize - first, work out the scale ratio to take the
    # image width to THUMBNAIL_WIDTH (THUMBNAIL_WIDTH:img_width ratio)
    scale_ratio = THUMBNAIL_WIDTH / float(img_width)
    # Work out target image height based on the scale ratio
    target_height = int(scale_ratio * img_height)
    # Perform the resize
    try:
        img.thumbnail((THUMBNAIL_WIDTH, target_height), resample=RESAMPLE)
    except IOError as exptn:
        print('WARNING: IOError when thumbnailing %s/%s: %s' % (
            dir_path, image_file, exptn
        ))
        return None
    # Return the resized image
    return img


def _get_thumbnail_src_from_file(dir_path, image_file, force_no_processing=False):
    """
    Get base-64 encoded data as a string for the given image file's thumbnail,
    for use directly in HTML <img> tags, or a path to the original if image
    scaling is not supported.

    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        actually process a thumbnail, PIL image or anything. Simply return the
        image filename as src.

    @return {String} The base-64 encoded image data string, or path to the
        file itself if not supported.
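
    Illustrative return values (shapes only, not real data):
        'data:image/jpeg;base64,/9j/4AAQ...'   # thumbnail encoded inline
        'a.jpg'                                # fallback to the original file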
""" # If we've specified to force no processing, just return the image filename if force_no_processing: if image_file.endswith('tif') or image_file.endswith('tiff'): return UNSUPPORTED_IMAGE_TYPE_DATA return image_file # First try to get a thumbnail image img = _get_thumbnail_image_from_file(dir_path, image_file) return _get_src_from_image(img, image_file) def _run_server(): """ Run the image server. This is blocking. Will handle user KeyboardInterrupt and other exceptions appropriately and return control once the server is stopped. @return {None} """ # Get the port to run on port = _get_server_port() # Configure allow_reuse_address to make re-runs of the script less painful - # if this is not True then waiting for the address to be freed after the # last run can block a subsequent run socketserver.TCPServer.allow_reuse_address = True # multi thread server server = ThreadingTCPServer(('', port), SimpleHTTPRequestHandler) # Print out before actually running the server (cheeky / optimistic, however # you want to look at it) print('Your images are at http://127.0.0.1:%d/%s' % ( port, INDEX_FILE_NAME )) # Try to run the server try: # Run it - this call blocks until the server is killed server.serve_forever() except KeyboardInterrupt: # This is the expected way of the server being killed, since imageMe is # intended for ad-hoc running from command line print('User interrupted, stopping') except Exception as exptn: # Catch everything else - this will handle shutdowns via other signals # and faults actually starting the server in the first place print(exptn) print('Unhandled exception in server, stopping') def serve_dir(dir_path): """ Generate indexes and run server from the given directory downwards. @param {String} dir_path - The directory path (absolute, or relative to CWD) @return {None} """ # Create index files, and store the list of their paths for cleanup later # This time, force no processing - this gives us a fast first-pass in terms # of page generation, but potentially slow serving for large image files print('Performing first pass index file generation') created_files = _create_index_files(dir_path, True) if (PIL_ENABLED): # If PIL is enabled, we'd like to process the HTML indexes to include # generated thumbnails - this slows down generation so we don't do it # first time around, but now we're serving it's good to do in the # background print('Performing PIL-enchanced optimised index file generation in background') background_indexer = BackgroundIndexFileGenerator(dir_path) background_indexer.run() # Run the server in the current location - this blocks until it's stopped _run_server() # Clean up the index files created earlier so we don't make a mess of # the image directories _clean_up(created_files) if __name__ == '__main__': # Generate indices and serve from the current directory downwards when run # as the entry point parser = argparse.ArgumentParser() parser.add_argument('--images_per_row', '-i', type=int, default=10) parser.add_argument('--port', '-p', type=int, default=8000) args = parser.parse_args() # Constants / configuration ## Filename of the generated index files INDEX_FILE_NAME = 'imageme.html' ## Regex for matching only image files IMAGE_FILE_REGEX = '^.+\.(png|jpg|jpeg|tif|tiff|gif|bmp)$' ## Images per row of the gallery tables IMAGES_PER_ROW = args.images_per_row ## Resampling mode to use when thumbnailing RESAMPLE = None if not PIL_ENABLED else Image.NEAREST ## Width in pixels of thumnbails generated with PIL THUMBNAIL_WIDTH = 800 serve_dir('.')
main.py
#!/usr/bin/env python
"""Main module."""
import importlib
import itertools
import logging
import logging.config  # needed for logging.config.dictConfig below
import multiprocessing
import os
import pkgutil
import signal
import sys
import threading
import time
from datetime import datetime, timedelta
from ipaddress import ip_address
from multiprocessing import Manager, Process

import pkg_resources
import prometheus_client
import scapy
import yaml
from flask import Flask
from flask_httpauth import HTTPBasicAuth
from pykwalify.core import Core, SchemaError
from scapy.all import conf as scapyconf
from waitress import serve

from netprobify import common, dynamic_inventories
from netprobify.external import percentile
from netprobify.metrics import (
    APP_HOST_RESOLUTION,
    APP_HOST_RESOLUTION_CHANGE,
    APP_PROCESS_TIMED_OUT,
    APP_RELOAD_CONF_FAILED,
    APP_ROUND_TIME,
    APP_TARGET_NAME_DUP,
    APP_TIME_OOO,
    ICMP_LOSS,
    ICMP_LOSS_RATIO,
    ICMP_ROUND_TRIP,
    ICMP_SENT,
    IPERF_BANDWIDTH,
    IPERF_LOSS,
    IPERF_LOSS_RATIO,
    IPERF_OUT_OF_ORDER,
    IPERF_SENT,
    LIST_TARGET_MEASUREMENT_METRICS,
    LIST_TARGET_METRICS,
    NETPROBIFY_INFO,
    TCP_LOSS,
    TCP_LOSS_RATIO,
    TCP_MATCH_ACK_FAIL,
    TCP_PORT_MISTMATCH,
    TCP_ROUND_TRIP,
    TCP_SENT,
    THRESHOLD,
    UDP_UNREACHABLE_LOSS,
    UDP_UNREACHABLE_LOSS_RATIO,
    UDP_UNREACHABLE_PORT_MISTMATCH,
    UDP_UNREACHABLE_ROUND_TRIP,
    UDP_UNREACHABLE_SENT,
)
from netprobify.protocol.common.protocols import list_self_ips
from netprobify.protocol.icmp_ping import ICMPping
from netprobify.protocol.iperf import Iperf
from netprobify.protocol.target import Group
from netprobify.protocol.tcpsyn import TCPsyn
from netprobify.protocol.udp_unreachable import UDPunreachable
from netprobify.settings import DEFAULT_ADDRESS_FAMILY, LOGGING_CONFIG

# we configure the logging before loading scapy to avoid a warning on non-IPv6 servers
logging.config.dictConfig(LOGGING_CONFIG)
log = logging.getLogger(__name__)

app = Flask(__name__)
auth = HTTPBasicAuth()


@auth.verify_password
def verify_password(username, password):
    """Check Basic HTTP authentication."""
    if not os.getenv("PROM_USER") or not os.getenv("PROM_PASSWORD"):
        return True
    return username == os.environ["PROM_USER"] and password == os.environ["PROM_PASSWORD"]


@app.route("/metrics")
@auth.login_required
def get_prometheus_metrics():
    """Return Prometheus metrics."""
    return prometheus_client.generate_latest()


class NetProbify:
    """Main class for NetProbify app."""

    def __init__(self, f="config.yaml"):
        """Netprobify initialization.
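
        Only initialises empty target/group state; the configuration is
        loaded separately. Illustrative usage (the config path is an
        assumption):

            probify = NetProbify("config.yaml")
            probify.load_conf()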
        Keyword arguments:
        f -- path of the config file
        """
        self.config_file = f
        self.list_targets = []
        self.list_target_name = []
        self.list_special_targets = []
        self.list_dynamic_targets = []
        self.list_dynamic_special_targets = []
        self.shared_dynamic_targets = {}
        self.shared_dynamic_targets_backup = {}
        self.list_groups = []
        self.global_vars = {}
        self.reload_conf_needed = False
        self.first_iter = True
        self.seq_gen = None

    def get_uniq_id(self, max_value):
        """Generate IDs, wrapping back to zero once max_value is exceeded."""
        number = 0
        while True:
            if number > max_value:
                number = 0
                # if max reached, we regenerate all targets to avoid overlap
                self.reload_conf_needed = True
                # if max reached before sending any packets, we have too many targets
                if self.first_iter:
                    raise Exception(
                        "Too many targets configured: not enough IDs available in generator"
                    )
            offset = yield number
            number += offset

    def instantiate_generator(self):
        """Instantiate a new generator."""
        self.seq_gen = self.get_uniq_id(2 ** 31)
        self.id_gen = self.get_uniq_id(2 ** 16 - 1)
        next(self.seq_gen)
        next(self.id_gen)

    def getter(self, name):
        """Get global variable (useful for unit tests only)."""
        return globals()[name]

    def load_target_conf(self, target, target_name, target_groups):
        """Load target from a dict.

        Keyword arguments:
        target -- dict containing description of the target
        target_name -- name of the target
        target_groups -- set of group names the target is associated with
        """
        if target["type"] == "TCPsyn":
            # create target
            target = TCPsyn(
                name=target_name,
                active=True,
                description=target.get("description", target_name),
                destination=None,
                config_destination=target["destination"],
                address_family=target.get("address_family", DEFAULT_ADDRESS_FAMILY),
                dont_fragment=target.get("dont_fragment", True),
                is_subnet=target.get("is_subnet", False),
                nb_packets=target.get("nb_packets", 1),
                interval=self.global_vars.get("interval_packets", 0),
                timeout=target.get("timeout", 1),
                dst_port=target["dst_port"],
                ip_payload_size=target.get("ip_payload_size"),
                threshold=target.get("threshold"),
                state=target.get("state"),
                alert_level=target.get("alert_level", "no_alert"),
                is_dynamic=target.get("is_dynamic", False),
                # dns_update interval is global if not specified
                dns_update_interval=target.get(
                    "dns_update_interval", self.global_vars.get("dns_update_interval", 0)
                ),
                groups=target_groups,
                creation_date=target.get("creation_date"),
                lifetime=target.get("lifetime"),
            )
        elif target["type"] == "ICMPping":
            # create target
            target = ICMPping(
                name=target_name,
                active=True,
                description=target.get("description", target_name),
                destination=None,
                config_destination=target["destination"],
                address_family=target.get("address_family", DEFAULT_ADDRESS_FAMILY),
                dont_fragment=target.get("dont_fragment", True),
                is_subnet=target.get("is_subnet", False),
                nb_packets=target.get("nb_packets", 1),
                interval=self.global_vars.get("interval_packets", 0),
                timeout=target.get("timeout", 1),
                ip_payload_size=target.get("ip_payload_size"),
                threshold=target.get("threshold"),
                state=target.get("state"),
                alert_level=target.get("alert_level", "no_alert"),
                is_dynamic=target.get("is_dynamic", False),
                # dns_update interval is global if not specified
                dns_update_interval=target.get(
                    "dns_update_interval", self.global_vars.get("dns_update_interval", 0)
                ),
                groups=target_groups,
                creation_date=target.get("creation_date"),
                lifetime=target.get("lifetime"),
            )
        elif target["type"] == "UDPunreachable":
            # create target
            target = UDPunreachable(
                name=target_name,
                active=True,
                description=target.get("description", target_name),
                destination=None,
                config_destination=target["destination"],
                address_family=target.get("address_family", DEFAULT_ADDRESS_FAMILY),
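                # (note) the remaining keyword arguments mirror the TCPsyn
                # branch above, including the UDP destination port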
dont_fragment=target.get("dont_fragment", True), is_subnet=target.get("is_subnet", False), nb_packets=target.get("nb_packets", 1), interval=self.global_vars.get("interval_packets", 0), timeout=target.get("timeout", 1), dst_port=target["dst_port"], ip_payload_size=target.get("ip_payload_size"), threshold=target.get("threshold"), state=target.get("state"), alert_level=target.get("alert_level", "no_alert"), is_dynamic=target.get("is_dynamic", False), # dns_update interval is global if not specified dns_update_interval=target.get( "dns_update_interval", self.global_vars.get("dns_update_interval", 0) ), groups=target_groups, creation_date=target.get("creation_date"), lifetime=target.get("lifetime"), ) elif target["type"] == "iperf": target = Iperf( name=target_name, active=True, description=target.get("description", target_name), destination=None, config_destination=target["destination"], address_family=target.get("address_family", DEFAULT_ADDRESS_FAMILY), dst_port=target["dst_port"], threshold=target.get("threshold"), state=target.get("state"), alert_level=target.get("alert_level", "no_alert"), is_dynamic=target.get("is_dynamic", False), # dns_update interval is global if not specified dns_update_interval=target.get( "dns_update_interval", self.global_vars.get("dns_update_interval", 0) ), groups=target_groups, duration=target.get("iperf_parameters", {}).get("duration", 5), bandwidth=target.get("iperf_parameters", {}).get("bandwidth_per_stream", "1M"), protocol=target.get("iperf_parameters", {}).get("protocol", "udp"), num_streams=target.get("iperf_parameters", {}).get("nb_parallel_streams", 1), creation_date=target.get("creation_date"), lifetime=target.get("lifetime"), ) else: return # we put the target in the right list if target.is_dynamic: if target.is_special: self.list_dynamic_special_targets.append(target) else: self.list_dynamic_targets.append(target) else: if target.is_special: self.list_special_targets.append(target) else: self.list_targets.append(target) def load_conf(self, schema_file="schema_config.yaml"): """Load the configuration from a config file. 
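
        The config file is validated against the pykwalify schema before the
        yaml content is parsed; a schema violation raises to the caller
        (reload_conf() rolls back, main() exits with EX_DATAERR).
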
Keyword arguments: schema_file -- relative/absolute path and filename for yaml schema """ log.debug("Loading configuration") # cleaning targets list self.list_groups = [] self.list_targets = [] self.list_special_targets = [] self.list_target_name = [] self.global_vars = {} self.first_iter = True # instantiate a new generator self.instantiate_generator() # validate yaml config with the schema schema = pkg_resources.resource_filename(__name__, schema_file) yaml_validator = Core(source_file=self.config_file, schema_files=[schema]) yaml_validator.validate(raise_exception=True) # we load the configuration from the file with open(self.config_file, "r") as conf_file: # load as a yaml conf = yaml.safe_load(conf_file) # get global variables self.global_vars = conf["global"] # setting logging level log.setLevel(self.global_vars["logging_level"]) # setting default percentile values if needed if self.global_vars.get("percentile") is None: self.global_vars["percentile"] = [95, 50] # get groups for group_name in conf["groups"]: group = conf["groups"][group_name] self.list_groups.append( Group( name=group_name, src_ipv4=group.get("src_ipv4", group.get("src_ip")), src_ipv6=group.get("src_ipv6"), src_subnet_ipv4=group.get("src_subnet_ipv4"), src_subnet_ipv6=group.get("src_subnet_ipv6"), src_port_a=group.get("src_port_a", 65000), src_port_z=group.get("src_port_z", 65001), ip_payload_size=group.get("ip_payload_size"), dscp=group.get("dscp", 0), permit_target_auto_register=group.get("permit_target_auto_register", True), ) ) # check targets are set if not conf.get("targets"): return # get target list for target_name in conf["targets"]: target = conf["targets"][target_name] if not target.get("address_family"): try: ip = ip_address(target.get("destination")) target["address_family"] = "ipv{}".format(ip.version) except ValueError: log.debug( "was not able to detect address-family from destination" ", setting to default (%s)", DEFAULT_ADDRESS_FAMILY, ) target["address_family"] = DEFAULT_ADDRESS_FAMILY if target_name in self.list_target_name: log.warning("Duplicate target name %s", target_name) APP_TARGET_NAME_DUP.labels(target_name=target_name).inc(1) else: self.list_target_name.append(target_name) # manage group association target_groups = set() # we register to all group if allowed if target.get("auto_register_to_groups", True): for grp in self.list_groups: if grp.permit_target_auto_register: target_groups.add(grp.name) # we explicitly register to a group for grp in target.get("explicit_groups", {}).get("register_to", []): if grp in conf["groups"]: target_groups.add(grp) else: log.warning( "Trying to associate '%s' to an inexistant group: %s", target_name, grp ) # we remove the target from a group for grp in target.get("explicit_groups", {}).get("exclude_from", []): try: target_groups.remove(grp) except Exception: log.info("Failed to remove target %s from %s", target_name, grp) # create target objects self.load_target_conf(target, target_name, target_groups) log.debug("Target %s created", target_name) if len(target_groups) == 0 and target["type"] != "iperf": log.warning("Target %s disabled: not associated to any group", target_name) def update_hosts(self, force=False): """Update targets with host resolution. 
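
        Keyword arguments: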
force -- force all target to update (after a reload conf for example) """ # we get our local IP addresses self_ips = {af: list_self_ips(af, scapyconf) for af in ("ipv4", "ipv6")} for target in itertools.chain( self.list_targets, self.list_special_targets, self.list_dynamic_targets, self.list_dynamic_special_targets, ): if len(target.groups): changed = False if not target.is_dynamic: # we update all target if needed if ( not force and target.dns_update_interval > 0 and target.time_to_refresh > time.time() ): continue log.debug("%s: updating", target.name) # we try to resolve the hostname if not a subnet if not target.is_subnet: new_ip = common.resolve_hostname( target.config_destination, target.address_family ) else: new_ip = target.config_destination # if the resolution failed if new_ip is None: # we disable the target and register the failure target.active = False APP_HOST_RESOLUTION.labels( probe_name=self.global_vars["probe_name"], destination=target.name, address_family=target.address_family, ).set(0) log.warning("Hostname resolution failed for %s", target.name) elif new_ip != target.destination: # we enable the target and register the change and success target.active = True changed = True APP_HOST_RESOLUTION.labels( probe_name=self.global_vars["probe_name"], destination=target.name, address_family=target.address_family, ).set(1) # we update the destination of the target target.destination = new_ip # we prevent the probe to ping itself to avoid issues (known bug) if new_ip in self_ips[target.address_family]: target.active = False log.info("Disabling %s because destination is the local machine", target.name) # we generate the packets for the target if active if target.active and changed: log.debug("%s: changed. New IP address is %s", target.name, new_ip) if isinstance(target, TCPsyn): target.generate_packets( self.list_groups, self.seq_gen, self.global_vars["logging_level"] ) elif isinstance(target, UDPunreachable): target.generate_packets( self.list_groups, self.id_gen, self.global_vars["logging_level"] ) elif isinstance(target, ICMPping): target.generate_packets(self.list_groups, self.global_vars["logging_level"]) APP_HOST_RESOLUTION_CHANGE.labels( probe_name=self.global_vars["probe_name"], destination=target.name, address_family=target.address_family, ).inc() # we expose threshold if target is active if target.active and target.threshold: # we clear the previous THRESHOLD metrics in case of changes self.clear_metrics([THRESHOLD], "destination", [target.name]) for threshold_name in target.threshold: for group in target.groups: threshold = target.threshold[threshold_name] THRESHOLD.labels( probe_name=self.global_vars["probe_name"], destination=target.name, address_family=target.address_family, state=target.state, type=threshold_name, alert_level=target.alert_level, group=group, ).set(threshold) target.time_to_refresh = time.time() + target.dns_update_interval else: # if there is no group associated the target is disabled (nothing to target) target.active = False log.warning("%s: disabled because no group associated", target.name) if not target.active: self.clear_metrics(LIST_TARGET_MEASUREMENT_METRICS, "destination", [target.name]) def clear_metrics(self, list_prom_obj, label_to_clear, list_value_to_clear): """Clear metrics when target has been deleted. It will clear every metrics contained in list_value_to_clear for the given label_to_clear. 
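        Prometheus objects whose label names do not include label_to_clear
        are skipped.
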
        Keyword arguments:
        list_prom_obj -- list of Prometheus objects which have to be cleaned
        label_to_clear -- name of the label whose values should be cleared (example: "destination")
        list_value_to_clear -- list of values to clear (example: "google")
        """
        log.debug("Cleaning metrics")

        # we clean the metrics from each Prometheus object
        for prom in list_prom_obj:
            # if the metric contains the label to clear
            if label_to_clear in prom.__dict__["_labelnames"]:
                # we record the position of the label value in the tuple
                index_to_clear = prom.__dict__["_labelnames"].index(label_to_clear)
                metrics_to_clear = []
                for metrics in prom.__dict__["_metrics"]:
                    # we store the tuple of the metric if the label value matches
                    if metrics[index_to_clear] in list_value_to_clear:
                        metrics_to_clear.append(metrics)

                # clearing selected metrics
                for metrics in metrics_to_clear:
                    prom.remove(*metrics)

    def reload_conf(self):
        """Reload configuration on the fly."""
        log.warning("Reloading configuration...")
        self.reload_conf_needed = False

        # we back up the target list
        list_target_before = set()
        for target in itertools.chain(self.list_targets, self.list_dynamic_targets):
            list_target_before.add(target.name)

        list_group_before = set()
        for group in self.list_groups:
            list_group_before.add(group.name)

        # back up the config before reloading
        backup_list_groups = self.list_groups
        backup_list_targets = self.list_targets
        backup_list_target_name = self.list_target_name
        backup_global_vars = self.global_vars

        try:
            # reloading the configuration also resets the list of probes/groups
            self.load_conf()
        except Exception:
            # we roll back
            self.list_groups = backup_list_groups
            self.list_targets = backup_list_targets
            self.list_target_name = backup_list_target_name
            self.global_vars = backup_global_vars
            log.exception("Error: configuration reload failed. Rollback.")
            APP_RELOAD_CONF_FAILED.labels(probe_name=self.global_vars["probe_name"]).set(1)
        else:
            # if the config load succeeded: update static targets
            self.update_hosts(True)

            # we get the new targets list
            list_target_after = set()
            for target in itertools.chain(self.list_targets, self.list_dynamic_targets):
                list_target_after.add(target.name)

            list_group_after = set()
            for group in self.list_groups:
                list_group_after.add(group.name)

            # we clean metrics for removed objects
            target_to_clean = list_target_before - list_target_after
            groups_to_clean = list_group_before - list_group_after
            percentile_to_clean = set(backup_global_vars["percentile"]) - set(
                self.global_vars["percentile"]
            )
            percentile_to_clean_str = set()
            for per in percentile_to_clean:
                percentile_to_clean_str.add(str(per))

            # clear the removed values from the reference list of Prometheus objects
            self.clear_metrics(LIST_TARGET_METRICS, "destination", target_to_clean)
            self.clear_metrics(LIST_TARGET_METRICS, "group", groups_to_clean)
            self.clear_metrics(LIST_TARGET_METRICS, "percentile", percentile_to_clean_str)

            # if probe_name changed, we clean all target metrics
            if backup_global_vars["probe_name"] != self.global_vars["probe_name"]:
                self.clear_metrics(
                    LIST_TARGET_METRICS, "probe_name", backup_global_vars["probe_name"]
                )

            log.warning("Configuration reloaded")
            APP_RELOAD_CONF_FAILED.labels(probe_name=self.global_vars["probe_name"]).set(0)

    def get_metrics(self):
        """Get metrics, calculate and expose."""
        log.debug("Updating metrics")

        # we get the values and add them to the Prometheus metrics
        for res in self.result:
            log.debug("%s: updating metrics", res["name"])
            for grp in self.list_groups:
                if grp.name in res["groups"]:
                    sent = 0
                    loss = 0
                    timestamp_ooo = 0
                    latency = []

                    if res["probing_type"] in ("TCPsyn", "UDPunreachable"):
                        # get some metrics
                        if res["probing_type"] == "TCPsyn":
                            match_fail = res["match_fail"]
                        port_mismatch = res["port_mismatch"]

                        # we get all metrics for the group port range
                        for port in range(grp.src_port_a, grp.src_port_z + 1):
                            sent += res[port]["sent"]
                            loss += res[port]["loss"]
                            latency += res[port]["latency"]
                            timestamp_ooo += res[port]["timestamp_ooo"]
                    elif res["probing_type"] == "ICMPping":
                        sent = res[grp.dscp]["sent"]
                        loss = res[grp.dscp]["loss"]
                        latency = res[grp.dscp]["latency"]
                        timestamp_ooo = res[grp.dscp]["timestamp_ooo"]

                    name = res["name"]
                    address_family = res["address_family"]

                    # we calculate the percentile values requested in the config file
                    latency.sort()
                    results_percentile = {}
                    for percent in self.global_vars["percentile"]:
                        results_percentile[percent] = percentile(latency, percent=percent / 100)
                        if results_percentile[percent] is None:
                            results_percentile[percent] = 0

                    APP_TIME_OOO.labels(
                        probe_name=self.global_vars["probe_name"],
                        destination=name,
                        address_family=address_family,
                        group=grp.name,
                    ).inc(timestamp_ooo)

                    if res["probing_type"] == "TCPsyn":
                        TCP_SENT.labels(
                            probe_name=self.global_vars["probe_name"],
                            destination=name,
                            address_family=address_family,
                            state=res["state"],
                            group=grp.name,
                        ).inc(sent)
                        TCP_LOSS.labels(
                            probe_name=self.global_vars["probe_name"],
                            destination=name,
                            address_family=address_family,
                            state=res["state"],
                            group=grp.name,
                        ).inc(loss)
                        TCP_LOSS_RATIO.labels(
                            probe_name=self.global_vars["probe_name"],
                            destination=name,
                            address_family=address_family,
                            state=res["state"],
                            group=grp.name,
                        ).set(loss / sent)
                        for percent, result in results_percentile.items():
                            TCP_ROUND_TRIP.labels(
                                probe_name=self.global_vars["probe_name"],
                                destination=name,
                                address_family=address_family,
                                state=res["state"],
                                group=grp.name,
                                percentile=percent,
                            ).set(result)
TCP_MATCH_ACK_FAIL.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, ).inc(match_fail) TCP_PORT_MISTMATCH.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, ).inc(port_mismatch) elif res["probing_type"] == "ICMPping": ICMP_SENT.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).inc(sent) ICMP_LOSS.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).inc(loss) ICMP_LOSS_RATIO.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).set(loss / sent) for percent, result in results_percentile.items(): ICMP_ROUND_TRIP.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, percentile=percent, ).set(result) elif res["probing_type"] == "UDPunreachable": UDP_UNREACHABLE_SENT.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).inc(sent) UDP_UNREACHABLE_LOSS.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).inc(loss) UDP_UNREACHABLE_LOSS_RATIO.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).set(loss / sent) for percent, result in results_percentile.items(): UDP_UNREACHABLE_ROUND_TRIP.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, percentile=percent, ).set(result) UDP_UNREACHABLE_PORT_MISTMATCH.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, ).inc(port_mismatch) elif res["probing_type"] == "iperf": loss_ratio = res["loss"] / res["sent"] if res["sent"] != 0 else 0 IPERF_SENT.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).set(res["sent"]) IPERF_LOSS.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).set(res["loss"]) IPERF_LOSS_RATIO.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).set(loss_ratio) IPERF_BANDWIDTH.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).set(res["bandwidth"]) IPERF_OUT_OF_ORDER.labels( probe_name=self.global_vars["probe_name"], destination=name, address_family=address_family, state=res["state"], group=grp.name, ).set(res["out_of_order"]) def reload_request(self, signum, frame): """Reload handler for SIGHUP. Will reload the configuration. Keyword arguments: signum -- signal number frame -- None or a frame object. Represents execution frames """ log.warning("Reload requested...") self.reload_conf_needed = True def stop_request(self, signum, frame): """Stop handler for SIGTERM and SIGINT. Keyword arguments: signum -- signal number frame -- None or a frame object. 
Represents execution frames """ log.warning("Process %i exiting...", os.getpid()) for child in multiprocessing.active_children(): child.terminate() os._exit(0) def check_expiration(self, target): """Check if the target is expired. Keyword arguments: target -- target to check """ # if there is no value set we consider it is not expired if ( not target.get("lifetime") or not target.get("creation_date") or target.get("lifetime") == timedelta(0) ): return False expiration_date = target["creation_date"] + target.get("lifetime") return datetime.now() > expiration_date def get_dynamic_targets(self): """Get targets from dynamic inventories.""" log.debug("Getting dynamic targets") # cleaning outdated targets for inventory in self.shared_dynamic_targets.keys(): dynamic_targets = self.shared_dynamic_targets[inventory] for target in dynamic_targets: if self.check_expiration(target): log.info("{}: {} is expired".format(inventory, target["hostname"])) dynamic_targets.remove(target) continue self.shared_dynamic_targets[inventory] = dynamic_targets # we check if the targets changed if self.shared_dynamic_targets_backup.__eq__(self.shared_dynamic_targets.copy()): # we do not need to load the targets log.debug("No dynamic targets changes") return log.debug("Loading new dynamic targets") # we reset the targets self.list_dynamic_targets = [] self.list_dynamic_special_targets = [] # get targets by inventory for inventory in self.shared_dynamic_targets.keys(): for target in self.shared_dynamic_targets[inventory]: # specific parameters for dynamic targets target_name = "{0}_{1}".format(inventory, target["hostname"]) target["description"] = "from_{0}".format(inventory) target["is_dynamic"] = True # create target objects self.load_target_conf(target, target_name, target.get("groups")) # we clean the targets removed or changed for inventory in self.shared_dynamic_targets_backup.keys(): # we browse all targets in the previous target list for target in self.shared_dynamic_targets_backup.get(inventory, []): # we check if target is still here and not changed if target not in self.shared_dynamic_targets.get(inventory, []): # we clear Prometheus metrics target_name = "{0}_{1}".format(inventory, target["hostname"]) log.debug("%s: cleaning metrics (dynamic target)", target_name) self.clear_metrics(LIST_TARGET_METRICS, "destination", target_name) # we backup the targets list self.shared_dynamic_targets_backup = self.shared_dynamic_targets.copy() log.debug("Dynamic targets loaded and backup done") def get_dynamic_inventories(self, path, import_path): """Get module path and name of dynamic inventories.""" list_module = [] for _, module_name, is_pkg in pkgutil.iter_modules(path): if module_name in self.global_vars.get("disable_dynamic_inventories", []): log.warning("Dynamic inventory '%s' disabled", module_name) continue if is_pkg: pkg_path = ["{}/{}".format(path[0], module_name)] pkg_import_path = "{}.{}".format(import_path, module_name) list_rec = self.get_dynamic_inventories(pkg_path, pkg_import_path) list_module.extend(list_rec) else: module_path = "{}.{}".format(import_path, module_name) list_module.append((module_name, module_path)) return list_module def load_dynamic_inventories(self): """Load dynamically all inventories in different processes.""" log.debug("Loading dynamic inventories") # creating the manager to share a dictionary manager = Manager() self.shared_dynamic_targets = manager.dict() list_module = self.get_dynamic_inventories(dynamic_inventories.__path__, "") # we run each dynamic inventory modules in 
        # different processes
        for module_name, module_path in list_module:
            if module_name in self.global_vars.get("disable_dynamic_inventories", []):
                log.warning("Dynamic inventory '%s' disabled", module_name)
                continue

            log.warning("Dynamic inventory '%s' enabled", module_name)

            # we import the module manually
            module = importlib.import_module(
                "netprobify.dynamic_inventories{}".format(module_path)
            )
            log.info("Loading %s module", module_name)

            # we start the process using "module".start()
            process = Process(
                target=module.start,
                args=(
                    self.shared_dynamic_targets,
                    module_name,
                    self.global_vars["logging_level"],
                    "{0}.yaml".format(module_name),
                ),
            )
            process.start()

    def start_processes(self, target_list, timeout):
        """Orchestrate probing among processes.

        Keyword arguments:
        target_list -- list of target objects to run in processes
        timeout -- maximum time in seconds to wait for each probing process
        """
        jobs = []
        manager = Manager()
        self.result = manager.list()

        # start the probing in processes
        for target in target_list:
            if target.active:
                # create, reference and start processes
                process = Process(
                    target=target.send_packets,
                    args=(
                        self.result,
                        self.global_vars["logging_level"],
                        self.list_groups,
                        self.global_vars.get("verbose", 0),
                        self.global_vars.get("l3_raw_socket", False),
                    ),
                )
                jobs.append(process)
                process.start()

                # wait for an available process slot
                while len(jobs) > self.global_vars["nb_proc"] - 1:
                    # check if processes are finished
                    for i in reversed(range(len(jobs))):
                        if not jobs[i].is_alive():
                            jobs.pop(i)

        log.debug("No targets in the queue anymore")

        # wait for all processes to finish
        for j in jobs:
            j.join(timeout=timeout)
            # if the timeout is reached and the process is still alive
            if j.is_alive():
                APP_PROCESS_TIMED_OUT.labels(probe_name=self.global_vars["probe_name"]).inc()
                log.warning("A probing process has timed out")
                j.terminate()
                j.join()

        log.info("All targets have been processed")

        # get and expose metrics
        self.get_metrics()

    def _expose_version(self):
        try:
            version = pkg_resources.require("netprobify")[0].version
        except pkg_resources.DistributionNotFound:
            with open("VERSION") as ver_file:
                version = ver_file.read().splitlines()[0]
        log.info("running netprobify %s, using scapy %s", version, scapy.VERSION)
        NETPROBIFY_INFO.labels(version=version, scapy_version=scapy.VERSION).set(1)

    def start_prometheus_server(self):
        """Start serving Prometheus metrics."""
        log.info(
            "HTTP server started and listening on port %i", self.global_vars["prometheus_port"]
        )
        serve(
            app,
            host=self.global_vars.get("prometheus_address", "0.0.0.0"),
            port=self.global_vars["prometheus_port"],
        )

    def main(self):
        """Entry point."""
        # handling signals
        signal.signal(signal.SIGTERM, self.stop_request)
        signal.signal(signal.SIGINT, self.stop_request)
        signal.signal(signal.SIGHUP, self.reload_request)

        # load configuration
        try:
            self.load_conf()
        except SchemaError:
            log.exception("Config file: YAML validation error")
            os._exit(os.EX_DATAERR)
        except Exception:
            log.exception("Error: configuration load failed")
            os._exit(os.EX_DATAERR)

        # start dynamic inventories
        self.load_dynamic_inventories()

        # DNS resolution, enable healthy targets
        self.update_hosts(True)

        # we calculate when we will have to reload the configuration
        time_to_reload = time.time() + self.global_vars.get("reload_conf_interval", 0)

        # start the prometheus http server
        thread = threading.Thread(target=self.start_prometheus_server)
        thread.start()

        self._expose_version()

        # initialize the metric
        APP_RELOAD_CONF_FAILED.labels(probe_name=self.global_vars["probe_name"]).set(0)

        # starting the loop
        while True:
            log.info("Starting new probing iteration")
            round_start = time.time()

            # start probing only for standard targets
            self.start_processes(
                itertools.chain(self.list_targets, self.list_dynamic_targets),
                self.global_vars.get("timeout", 3600),
            )

            # start probing only for special targets which are not a priority
            remaining_time = self.global_vars["interval"] - (time.time() - round_start)
            self.start_processes(self.list_special_targets, remaining_time)

            # the first iteration is done
            self.first_iter = False

            # get targets from dynamic inventories
            self.get_dynamic_targets()
            self.update_hosts()

            # request a configuration reload if the interval is reached
            if time.time() > time_to_reload and self.global_vars.get("reload_conf_interval", 0) > 0:
                self.reload_conf_needed = True

            # reload the configuration if necessary
            if self.reload_conf_needed:
                self.reload_conf()
                time_to_reload = time.time() + self.global_vars.get("reload_conf_interval", 0)

            # calculate the round time
            round_duration = time.time() - round_start
            APP_ROUND_TIME.labels(probe_name=self.global_vars["probe_name"]).set(round_duration)
            log.info("Probing iteration finished in %i seconds", round_duration)

            # wait the appropriate time to respect the interval set in the config
            time_to_wait = self.global_vars["interval"] - round_duration
            if time_to_wait > 0:
                log.info("Waiting %i seconds", time_to_wait)
                time.sleep(time_to_wait)


def entrypoint():
    """Entrypoint of the program."""
    # load the script with a custom config file if any
    if len(sys.argv) > 1:
        f = sys.argv[1]
        if not os.path.isfile(f):
            # refuse to start with a missing config file instead of
            # crashing later on start.main()
            log.error("Config file %s not found", f)
            sys.exit(os.EX_DATAERR)
        start = NetProbify(f)
    else:
        start = NetProbify()
    start.main()
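

# Illustration only, not part of the original module: netprobify is presumably
# started through a console_scripts entry point that calls entrypoint(), so a
# __main__ guard is a minimal sketch for running this file directly, e.g.
# "python main.py config.yaml".
if __name__ == "__main__":
    entrypoint()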
test_isp.py
# -*- coding: utf-8 -*- """ Alle in isp befindlichen Klassen und Funktionen prüfen. Alle Laufzeit Fehlermeldungen sind bei der Testausführung gewollt Nach der Ausführung steht am Ende OK wenn alle Tests durchgefürt wurden. Bei Fehlern in den Überprüfungen steht am Ende:: ====================================================================== FAIL: ....... FAILED (failures=x) """ import os from os import path as osp import site # alle Module auch von der Konsole erreichbar machen ABSPATH = osp.dirname( osp.abspath( __file__) ) base_path = osp.join( ABSPATH , "..") site.addsitedir(base_path) import shutil import unittest import json import time from datetime import datetime import warnings warnings.filterwarnings("ignore") from dotmap import DotMap import threading from isp.config import ispConfig, dict_merge from isp.webapp import ispBaseWebApp from isp.safrs import db, system from testbase import testCaseBase import testdb as testdb from testdummy import dummy import logging logger = logging.getLogger() # ordner test/files files_path = os.path.join( ABSPATH, 'files') if not os.path.exists( files_path ): try: os.makedirs( files_path ) except IOError as e: print("Unable to create dir.", e) # weasyprint logging wp_log_file = os.path.join(files_path, 'weasyprint.log') if os.path.exists( wp_log_file ): os.remove( wp_log_file ) wp_logger = logging.getLogger('weasyprint') wp_logger.addHandler( logging.FileHandler( wp_log_file ) ) wp_logger.setLevel( logging.CRITICAL ) # WARNING, CRITICAL def run( config:dict={} ): ''' Startet ispBaseWebApp mit zusätzlichen config Angaben Parameters ---------- config : dict, optional DESCRIPTION. The default is {}. Returns ------- webApp : ispBaseWebApp Die gestartete WebApplication ''' # Konfiguration öffnen _config = ispConfig( config=config ) _apiConfig = { "models": [ system, dummy, testdb.dbtests, testdb.dbtestsrel ], } _webconfig = { # nur um update von webconfig zu testen "name" : "test_isp", } # Webserver starten webApp = ispBaseWebApp( _config, db, webconfig=_webconfig, apiconfig=_apiConfig ) return webApp class testBase(testCaseBase): ''' setUp(), tearDown(), and __init__() will be called once per test. app: Flask initialisierte Flask app api: SAFRSAPI initialisierte SAFRSAPI ''' @classmethod def setUpClass(cls): ''' Wird beim initialisieren der Testklasse aufgerufen - Api bereitstellen - test Ergebnisse zum Vergleich laden ''' # This attribute controls the maximum length of diffs output by assert methods that report diffs on failure. 
# It defaults to 80*8 characters cls.maxDiff = None files_path = os.path.join( ABSPATH, 'files') pdf_path = os.path.join( ABSPATH, 'files', 'pdf') if not os.path.exists( files_path ): os.mkdir( files_path ) resources_path = os.path.join( ABSPATH , "resources" ) check_path = os.path.join( resources_path, 'check') if not os.path.exists( check_path ): os.mkdir( check_path ) # alte Datenbank löschen: über Pfad Angaben falls in der config nicht die testdatei steht db_file = os.path.join( files_path, "test_isp.db" ) if os.path.exists( db_file ): os.remove( db_file ) pass dbtests_file = os.path.join( resources_path, "dbtests.json" ) dbtests = [] with open(dbtests_file, 'r') as fp: dbtests = json.load(fp) dbtestsrel_file = os.path.join( resources_path, "dbtestsrel.json" ) dbtestsrel = [] with open(dbtestsrel_file, 'r') as fp: dbtestsrel = json.load(fp) # alle erzeugten pdf und den Pfad pdf löschen if os.path.exists( pdf_path ): shutil.rmtree( pdf_path ) swagger_file = os.path.join( check_path, "swagger_test.json" ) if not os.path.exists( swagger_file ): with open(swagger_file, 'w') as fp: obj = { "info": { "title": "test_isp" } } json.dump(obj, fp, indent=2) # webapp mit unitest config cls.webapp = run( { "loglevel" :{ "safrs" : logging.DEBUG, # 10 "sqlalchemy" : logging.DEBUG, # 10 "webapp" : logging.DEBUG, }, "server" : { "webserver" : { "name" : "swagger_test", "port" : 5001, "TESTING": True, "reloader" : False }, "api": { "DBADMIN": True, "custom_swagger_config": os.path.join( check_path, "swagger_test.json" ) } }, "templates":{ "PDF-HEADER": None }, "database": { "main": "tests", "tests" : { "connection": "sqlite:///{{BASE_DIR}}/tests/files/test_isp.db" } } } ) cls.app = cls.webapp.app cls.api = cls.webapp.api # import sqlalchemy, weasyprint #print( "sqlalchemy.__version__", sqlalchemy.__version__ ) #print( "weasyprint.__version__", weasyprint.__version__ ) # print("###### setUpClass", dbtests( ) ) # n = dbtests( string="test" ) # Grunddaten in die Datenbank laden for d in dbtests: cls.app.post( "api/dbtests/", headers={'Content-Type': 'application/json'}, data=json.dumps({ "data": { "attributes": d, "type":"dbtests" } })) for d in dbtestsrel: cls.app.post( "api/dbtestsrel/", headers={'Content-Type': 'application/json'}, data=json.dumps({ "data": { "attributes": d, "type":"dbtestsrel" } })) @classmethod def tearDownClass(cls): """ config unittest file löschen """ #os.remove( cls.unitest_file ) pass def setUp(self): ''' wird vor jedem test aufgerufen ''' pass def tearDown(self): ''' wird nach jeden test aufgerufen Returns ------- None. ''' #self.app. 
# close the browser window #self.driver.quit() pass class ispTest( testBase ): def test_config_mqtt(self): '''isp.config ispConfig mit MQTTHandler (isp.mqtt) prüfen immer mit neuen kernel für mqttInitLogging ''' # zuerst ohne parameter aufrufen config = ispConfig( ) # __repr__ testen soll nicht die Klasse sondern die config selbst (dotmap) geben self.assertEqual( repr(config)[:7], 'DotMap(' , "Fehler beim laden __repr__") # Magic Methods prüfen self.assertEqual( config.__dict__["_loadErrors"], [], "Fehler beim laden von _loadErrors") self.assertEqual( config._loadErrors, [], "__getitem__ Fehler bei vorhandenen _loadErrors im Object") self.assertEqual( type(config.test), DotMap, "__getitem__ Fehler bei nicht vorhandenen in der config") # __getattr__ wird bei nicht vorhandenen aufgerufen self.assertEqual( config._test, None, "__getitem__ Fehler bei nicht vorhandenen im Object") # __getitem__ self.assertEqual( config["_loadErrors"], [], "__getitem__ Fehler") # __getitem__ self.assertEqual( type(config["versions"]), DotMap, "__getitem__ mit dotmap Fehler") # __getattr__ mit dotmap (config Values) self.assertEqual( type(config.versions), DotMap, "__getattr__ mit dotmap Fehler") # __setitem__ config["_version"] = '2.unittest' # __setitem__ self.assertEqual( config.__dict__["_version"], '2.unittest', "__setitem__ Fehler") # __setitem__ mit dotmap (config Values) config["unittest"] = '3.unittest' # __setitem__ self.assertEqual( config.unittest, '3.unittest', "__setitem__ mit dotmap Fehler") # __setattr__ config._version = '3.unittest' # __setattr__ self.assertEqual( config.__dict__["_version"], '3.unittest', "__setattr__ Fehler") # Zugiffe auf die config selbst # # komplette config als dict self.assertEqual( type( config.get() ), dict, "komplette config als dict") # config get mit default self.assertEqual( config.get("gibtsnicht", "defaultValue"), 'defaultValue', "config get mit default") # dotmap set oberste ebene config._config["unittest"] = '4.unittest' self.assertEqual( config.get("unittest") , '4.unittest', "dotmap get auf erster ebene") # dotmap set/get auf einer ebene config._config.A.unittest = '4A.unittest' self.assertEqual( config.get("A.unittest") , '4A.unittest', "dotmap get auf zweiter ebene") config._config.A.B.unittest = '4AB.unittest' self.assertEqual( config.get( ["A", "B", "unittest"] ) , '4AB.unittest', "dotmap get auf dritter ebene") # dotmap set oberste ebene config.set("5unittest", '5-unittest') # dotmap get self.assertEqual( config.get("5unittest"), '5-unittest', "dotmap set auf erster ebene anlegen") # dotmap set oberste ebene überschreiben config.set("5unittest", '5a-unittest') # dotmap get self.assertEqual( config.get("5unittest"), '5a-unittest', "dotmap set auf erster ebene ändern") # dotmap set zweite ebene config.set("B5.unittest", '5B-unittest') # dotmap get self.assertEqual( config.get("B5.unittest"), '5B-unittest', "dotmap set auf zweiter ebene") # dotmap set zweite ebene als list config.set(["C5","unittest"], '5C-unittest') # dotmap get self.assertEqual( config.get(["C5","unittest"]), '5C-unittest', "dotmap set/get auf zweiter ebene als list") # dotmap set zweite ebene neues Element config.set("B5.unittestA", '5B-unittest') self.assertEqual( config.get("B5").toDict(), {'unittest': '5B-unittest', 'unittestA': '5B-unittest'}, "dotmap set zweite ebene neues Element") # hilfsfunktion dict_merge testen a = {"A":1} b = {"B":2} c = dict_merge(a, b) self.assertEqual( c, {'A': 1, 'B': 2}, "dict_merge auch neue keys") c = dict_merge(a, b, False) self.assertEqual( c, {'A': 
1}, "dict_merge nur vorhandene keys") # test in config setzen update prüfen # localtime = time.strftime("%Y%m%d %H:%M:%S.%f", time.localtime(time.time()) ) config.test = {"a":1, "time": localtime } # a verändern config.update( { "test": {"a":2} }) self.assertEqual( config.test, {"a":2, "time": localtime }, "Fehler bei config update") # ohne mqtt findet default logging statt (konsole) # .. todo:: Konsole logger funktionen noch überprüfen logger = logging.getLogger( "MQTT" ) logger.debug('logger.debug') logger.info("logger.info") logger.warning("logger.warning") logger.error("logger.error") # mqtt logging prüfen # if config.get("server.mqtt.host", "") == "": print( "(MQTT) keine Angaben in config vorhanden. MQTT wird nicht getestet!") return; # config mit anderem mqttLevel config = ispConfig( mqttlevel=30 ) mqtt = config.mqttGetHandler() self.assertIsNotNone( mqtt, "kein MQTT handler vorhanden") results = {} mqtt_event = threading.Event() mqttResult = None def onMqtt( msg ): global mqttResult # in results die empfangenen ablegen mqttResult = msg results[ msg["topic"] ] = msg["payload"] mqtt_event.set() # funktion bei signal aufrufen mqtt.signal.connect( onMqtt ) def publishThread( args ): global mqttResult mqttResult = None mqtt_event.clear() # Als Thread aufrufen, über mq.get() wird die Rückgabe von _retrieve abgerufen thread = threading.Thread( target=mqtt.publish, args=( args,) ) thread.start() # max 2 sekunden oder auf mqtt_event aus onMqtt warten while not mqtt_event.wait( timeout=3 ): mqtt_event.set() return mqttResult # die eigenen script infos result = publishThread({ "topic": "cmnd/status" } ) self.assertEqual( result["topic"], "stat/status", "Fehler bei cmnd/status abfrage") # python process vorhanden? result = publishThread({ "topic": "cmnd/process", "payload" : "python" } ) #print("----------------------cmnd/process", result ) self.assertEqual( result["topic"], "stat/process", "Fehler bei process abfrage") # publish ohne topic - publish wird nicht aufgerufen # hier wird in publishThread auf timeout gewartet result = publishThread({ "payload": "publish ohne topic - publish wird nicht aufgerufen" }) self.assertIsNone( result, "Fehler bei process abfrage") # publish ohne payload - publish wird mit leerem payload aufgerufen result = publishThread({ "topic": "cmnd/test/leer" }) self.assertEqual( result["payload"], "", "Fehler bei leerem payload") # payload mit object - publish wird mit leerem payload aufgerufen nur (str, bytearray, int, float) ist ok result = publishThread({ "topic": "cmnd/test/object", "payload": object() }) self.assertEqual( result["payload"], "", "Fehler bei object payload") # payload als Text result = publishThread({ "topic": "cmnd/test/string", "payload": "payload als Text" }) self.assertEqual( result["payload"], "payload als Text", "Fehler bei text payload") # payload als dict result = publishThread({ "topic": "cmnd/test/dict", "payload": {"text":"payload als dict"} }) self.assertEqual( result["payload"], {"text":"payload als dict"}, "Fehler bei dict payload") # mqtt.client.subscribe( "gqa_dev/logging/#" ) # mqtt funktionen über logger logger = logging.getLogger( "MQTT" ) logger.setLevel( logging.DEBUG ) logger.send() logger.send("test/publish") logger.progressStart( "test" ) logger.progress( "test", 50 ) logger.progressReady( "test" ) # test über mqtt anstatt über sofort über logger mqtt.logging = True mqtt.info("config.info") mqtt.warning("config.warning") mqtt.error("config.error") # .. 
        # todo:: check the config output on the console when mqtt is not available
        config.mqttCleanup()
        mqtt.info("config.info nach cleanup")
        mqtt.warning("config.warning nach cleanup")
        mqtt.error("config.error nach cleanup")

        # config with wrong mqtt settings
        #
        config = ispConfig( )
        port = config._config.server.mqtt.port
        config._config.server.mqtt.port = 111111
        config.mqttInitLogger( cleanup=True )
        mqtt = config.mqttGetHandler()
        self.assertIsNone( mqtt, "Trotz init Fehler MQTT handler vorhanden")
        #mqtt.info("config.info nach Fehler bei MQTT config")

        config._config.server.mqtt.port = port
        config.mqttInitLogger( cleanup=True )
        time.sleep(4) # sleep a few seconds so mqtt messages can be received

        # no output happens here because mqtt is no longer available
        logger.info("logger.info nach MQTT init Fehler")
        logger.send("cmnd/test/publish", "nach MQTT init Fehler")

        time.sleep(2) # sleep so the logger mqtt messages can be received
        #print( results )
        self.assertIn( "cmnd/test/publish", results, "Fehler nach MQTT init Fehler")

        #mqtt.publish({
        #    "topic": "cmnd/status"
        #})

        # close mqtt in the config
        config.mqttCleanup( )
        #print( results )

    def test_config_files( self ):
        # simply provide a config
        config = ispConfig( )

        temp_conf = {
            "unittest": True,
            "version" : "0.0.1",
            "variables": {
                "Version" : "0.0.1a",
            },
            "value": 0,
            "content": "test"
        }

        config = ispConfig( config = temp_conf )

        test = {
            "value" : config.get("value"),
            "content" : config.get("content"),
            "info" : config.get("info")
        }

        self.assertDictEqual(test, {
            "value" : 0,
            "content" : "test",
            "info" : None
        }, "config Rückgabe stimmt nicht")

        # check the version handling
        # create additional overlay files
        unitest_json_file_00 = os.path.join( config.BASE_DIR, "config", "config-18200000.json")
        with open(unitest_json_file_00, 'w') as f:
            f.write( '{ "value": 0, "content": "test" }' )

        unitest_json_file_01 = os.path.join( config.BASE_DIR, "config", "config-18200101.json")
        with open(unitest_json_file_01, 'w') as f:
            f.write( '{ "value": 1, "info": "info 18200101" }' )

        unitest_json_file_05 = os.path.join( config.BASE_DIR, "config", "config-18200105.json")
        with open(unitest_json_file_05, 'w') as f:
            f.write( '{ "value": 5, "info": "info 18200105" }' )

        config = ispConfig( )
        test = {
            "value" : config.get("value"),
            "content" : config.get("content"),
            "info" : config.get("info")
        }
        self.assertDictEqual(test, {
            "value" : 5,
            "content" : "test",
            "info" : "info 18200105"
        }, "config Rückgabe stimmt nicht")

        config = ispConfig( lastOverlay="18200101" )
        test = {
            "value" : config.get("value"),
            "content" : config.get("content"),
            "info" : config.get("info")
        }
        self.assertDictEqual(test, {
            "value" : 1,
            "content" : "test",
            "info" : "info 18200101"
        }, "config Rückgabe stimmt nicht")

        os.remove( unitest_json_file_00 )
        os.remove( unitest_json_file_01 )
        os.remove( unitest_json_file_05 )

        # create config-0000.json with broken content,
        # check the error and remove the file again
        #
        error_json_file = os.path.join( config.BASE_DIR, "config", "config-0000.json")
        with open(error_json_file, 'w') as f:
            f.write( "#Falscher Inhalt" )

        config = ispConfig()

        self.assertEqual(
            config._loadErrors,
            [ error_json_file ],
            "load error wurde nicht ausgelöst"
        )

        os.remove( error_json_file )

    def test_config_jinja(self):
        '''Test the jinja template functions of the config.
''' # eine eigene config mit resources im tests Ordner config = ispConfig( config={ "server": { "webserver": { "resources" : os.path.join( ABSPATH, "resources" ) } } }) # das aktuelle datum datum = datetime.now().strftime('%d.%m.%Y') result_A = """<ul> <li>testuser</li> </ul> <h2>Markdown</h2> <ul> <li>Datum aus variables <strong>Datenausgabe</strong> :{{Datenausgabe}}</li> <li>Inhalt aus variables<ul> <li>Version: </li> <li>render_mode: </li> </ul> </li> </ul> <h2>icon</h2> <i class="mdi mdi-check-outline green-text"></i> <img src="/test.svg" alt="test.svg" /> Datum mit now: #datum#""".replace( "#datum#", datum ) result_B = """<ul> <li>testuser</li> </ul> <h2>Markdown</h2> <ul> <li>Datum aus variables <strong>Datenausgabe</strong> :#datum#</li> <li>Inhalt aus variables<ul> <li>Version: </li> <li>render_mode: </li> </ul> </li> </ul> <h2>icon</h2> <i class="mdi mdi-check-outline green-text"></i> <img src="/test.svg" alt="test.svg" /> Datum mit now: #datum#""".replace( "#datum#", datum ) meta = { "user" : "testuser", "Datenausgabe": "{{ now.strftime('%d.%m.%Y') }}", "name": "{{user}}" } tpl = """{% markdown %} * {{ user }} {% endmarkdown %} {% include "test_template.jinja" %} Datum mit now: {{ now.strftime('%d.%m.%Y') }}""" result = config.render_template( tpl, meta, deep_replace=False ) self.assertEqual(result, result_A, "template nicht OK") result = config.render_template( tpl, meta, deep_replace=True ) self.assertEqual(result, result_B, "template nicht OK") def test_webapp_base_system( self ): ''' Webapp Aufruf auf system funktionen ''' response = self.app.get( "api/system" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") response = self.app.get( "api/system", query_string = { "format" : "html" } ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") response = self.app.get( "api/system/test", query_string = { "zahl" : 12 } ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertDictEqual( response.json["data"], { "_ispcp": {}, "bool": False, "text": "typenlos", "zahl": 12.0}, "Response data nicht OK" ) response = self.app.get( "api/system/15" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertDictEqual( response.json["data"]["kwargs"], {'format': 'html', 'info': 'kwargs', 'systemId': '15'}, "Response data nicht OK" ) # print("test_webapp_base_system", response.json ) def test_webapp_base_statics( self ): ''' Webapp Aufruf auf Statische Inhalte ''' # index auf zwei arten aufrufen response = self.app.get( "/" ) #self.assertEqual(response.status_code, 200, "Api Status nicht 200") index = response.data response = self.app.get( "/render/index", query_string = { "zahl":"012", "bool":True, "test":1, "_ispcp": json.dumps( {"name":"B"} ) } ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual(index, response.data, "index und render/index nicht gleich") # render auf nicht auf nicht vorhandenes Template in ui response = self.app.get( "/render/keintemplate" ) self.assertEqual(response.status_code, 404, "render auf nicht auf nicht vorhandenes Template in ui") # load auf nicht vorhandene Datei testen response = self.app.get( "/globals/js/keinedatei" ) self.assertEqual(response.status_code, 404, "load auf nicht vorhandene Datei") # in ui eine unittest_route.phtml erzeugen route_file = os.path.join( ABSPATH , "..", "ui", "unittest_route.phtml") with open(route_file, 'w') as f: f.write( "value={{ value }}" ) # ohne parameter response = self.app.get( "/unittest_route" ) 
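        # the unittest_route.phtml written above is picked up by the running
        # app without a restart, which is what the 200 below verifies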
self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual(response.data, b"value=None", "Inhalt ist nicht value=None;_ispcp=") # zwei gleiche parameter (nur der erste wird verwendet) response = self.app.get( "/unittest_route?value=12&value=1" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual(response.data, b"value=12", "Inhalt ist nicht value=12;_ispcp= FirstValueURIParser") # unittest_route.phtml in ui wieder entfernen os.remove( route_file ) # in ui eine unittest_route_ispcp.phtml erzeugen route_file1 = os.path.join( ABSPATH , "..", "ui", "unittest_route_ispcp.phtml") with open(route_file1, 'w') as f: f.write( "{{ params }}" ) # Parameter als dict response = self.app.get( '/unittest_route_ispcp' , query_string = { "name":"A", "uuid":1, "id":1, "_ispcp": json.dumps( {"name":"B"} ) } ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertDictEqual( json.loads( response.data.decode('utf-8') ), {"uuid": "1", "id": "1", "name": "B"}, "Inhalt ist nicht mit dict") # unittest_route_ispcp.phtml in ui wieder entfernen os.remove(route_file1) # # mit fehler bei _ispcp response = self.app.get( "/render/index", query_string = { "zahl":"012", "bool":True, "test":1, "_ispcp": "name" } ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") def test_webapp_base_extras( self ): ''' Website Aufruf für zusätzliche Inhalte ''' # htmlcov laden geht nur wenn es schon erzeugt wurde htmlcov_path = osp.join( ABSPATH , "..", ".htmlcov") if osp.isdir( htmlcov_path ): response = self.app.get( "/coverage" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") response = self.app.get( "/coverage/coverage.css" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") else: print( "(coverage) Test erst nach dem Erstellen möglich." ) # über resources laden response = self.app.get( "resources/logo.png" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") # über fonts laden aber mit Fehler für coverage response = self.app.get( "fonts/irgendwas" ) self.assertEqual(response.status_code, 404, "Api Status nicht 404") # über dbadminframe laden response = self.app.get( "dbadminframe" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") # docs iframe laden response = self.app.get( "/docs" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") # /docs/ wird zu /docs also auch iframe laden response = self.app.get( "/docs/" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") # docs laden (beim ersten Aufruf erzeugen) response = self.app.get( "/docs/index.html" ) # es kommt vor das erst beim 2. Aufruf alles erzeugt wird if response.status_code == 404: # 2. Versuch response = self.app.get( "/docs/index.html" ) # jetzt OK self.assertEqual(response.status_code, 200, "docs Aufruf Api Status nicht 200. 
Wurde docs erzeugt?") # dbadmin laden response = self.app.get( "/dbadmin" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") # neue webapp ohne parameter webbapp =ispBaseWebApp( ) self.assertEqual(webbapp._config.get("server.webserver.TESTING"), True, "Testing ist nicht True") # neue webapp mit dict nur mit TESTING Angabe webbapp =ispBaseWebApp( {"server" : {"webserver" : { "TESTING": True } } } ) self.assertEqual(webbapp._config.get("server.webserver.TESTING"), True, "Testing ist nicht True") def test_webapp_base_api( self ): # Inhalt von swagger mit der Angabe in custom_swagger_path prüfen response = self.app.get( "api/swagger.json" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["info"]["title"], "test_isp", "swagger file nicht ok") self.assertEqual( list( response.json["paths"].keys() ), ['/dbtests/', '/dbtests/groupby', '/dbtests/groupsplit', '/dbtests/pandas', '/dbtests/test', '/dbtests/undefined', '/dbtests/{dbtestsId}/', '/dbtests/{dbtestsId}/dbtestsrel', '/dbtestsrel/', '/dbtestsrel/groupby', '/dbtestsrel/groupsplit', '/dbtestsrel/undefined', '/dbtestsrel/{dbtestsrelId}/', '/dbtestsrel/{dbtestsrelId}/dbtests', '/dummy/', '/dummy/pdf', '/dummy/test', '/dummy/{dummyId}/', '/system/', '/system/test', '/system/{systemId}/' ], "Fehlerhafte paths Angaben in swagger.json") response = self.app.get( "api/gibtsnicht" ) self.assertEqual(response.status_code, 404, "Fehlerhafter api Zugriff ist nicht 404") def test_webapp_dummy_test( self ): ''' Api aufruf durchführen GET /api/dummy/ ''' # --- dummy Klasse abfragen # dummy api_list abfragen response = self.app.get( "api/dummy" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"], [{ 'attributes': {'function': 'api_list', 'kwargs': {'_ispcp': {}}}, 'id': '12', 'links': {'self': 'http://localhost/api/dummy/12/'}, 'type': 'dummy' }], "falsche api_list Rückgabe" ) # dummy api_get abfragen wird dummyId mitgegeben response = self.app.get( "api/dummy/12" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") #print(response.json["data"][0]) self.assertDictEqual( response.json["data"], { 'attributes': {'dummyId': '12'}, 'id': 12, 'links': {'self': 'http://localhost/api/dummy/12/'}, 'type': 'dummy' }, "falsche id Rückgabe" ) #print( response.json ) # ohne Pflichfeld Angabe test gibt es nicht response = self.app.get( "api/dummy/test" ) # print("api/dummy/test", response.json ) self.assertEqual(response.status_code, 400, "Api Status nicht 400") self.assertDictEqual( response.json, { "message": { "zahl": "Eine Zahl" } }, "nicht abgelehnt ohne Pflichfeld Angabe" ) # ohne text (hat default) mit test (nicht vorhanden) # /api/system/test?zahl=012&bool=True&test=1&_ispcp={"name":"B"} response = self.app.get( "api/dummy/test", query_string={ "zahl":"012", "bool":True, "test":1, "_ispcp": json.dumps( {"name":"B"} ) } ) # kommen auch zusätzliche Angaben und werden unnötige ausgefiltert self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertDictEqual( response.json["data"], { "_ispcp": {"name": "B"}, "bool": True, "text": "typenlos", "zahl": 12.0 }, "Parameter Auswertung falsch" ) response = self.app.get( "api/dummy/undefined" ) # einen undefined holen self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"], {'attributes': {}, 'id': 'undefined', 'type': 'dummy'}, "undefined fehlerhaft" ) # Dummy ohne funktion gibt undefined Datensatz response = 
self.app.get( "api/dummy/gibtsnicht" ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"], { 'attributes': {}, 'id': 'undefined', 'type': 'dummy' }, "Dummy ohne funktion gibt keine undefined datensatz " ) # response = self.app.get( "api/dummy/test", query_string={ "zahl": 1 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json["data"], [], "Test leere Liste" ) response = self.app.get( "api/dummy/test", query_string={ "zahl": 2 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json["data"], [{"a": 1, "b": 2}], "Test Liste mit einem Element" ) # fehler bei der Umwandlung data bleibt leer response = self.app.get( "api/dummy/test", query_string={ "zahl": 3 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json["data"], [], "fehler bei der Umwandlung data bleibt leer" ) response = self.app.get( "api/dummy/test", query_string={ "zahl": 4 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") response = self.app.get( "api/dummy/test", query_string={ "zahl": 5, "_ispcp" : "{test}"} ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json['errors'], [{'title': 'swagger Parameter Json Error', 'detail': '_ispcp={test}', 'code': None}], "Parameter Json Error" ) # _int_query selbst aufrufen response = self.app.get( "api/dummy/test", query_string={ "zahl": 6 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json['data'], [{'A': 1}, {'B': 2}], "Parameter Json Error" ) # _int_group_query selbst aufrufen response = self.app.get( "api/dummy/test", query_string={ "zahl": 7 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json['errors'], [], # [{'message': 'Fehler bei _int_group', 'info': "'dummyQuery' object has no attribute 'group_by'"}], "_int_group_query selbst aufrufen" ) # access_cls selbst aufrufen response = self.app.get( "api/dummy/test", query_string={ "zahl": 8 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json['data'], [{'nicht da': ''}, {'sqlalchemy.BigInteger': ''}], "access_cls selbst aufrufen" ) # iso2date aufrufen response = self.app.get( "api/dummy/test", query_string={ "zahl": 9 } ) self.assertEqual(response.status_code, 200, "Status nicht 200") self.assertEqual( response.json['data'], [ {'test=None': None}, {'20180415=2018-04-15': '2018-04-15'}, {'2018-04-15=2018-04-15': '2018-04-15'}, {'2018-04-15=2018-04-15': '2018-04-15'}, {'2018-04-15 14:36:25=2018-04-15': '2018-04-15'}, {'2018-04-15=18-04-15 00:00:00': '2018-04-15 00:00:00'}, {'2018-04-15 14:36:25=2018-04-15 14:36:25': '2018-04-15 14:36:25'}, {'20180415 14:36:25=2018-04-15 14:36:25': '2018-04-15 14:36:25'}, {'20180415 14:36=2018-04-15 14:36:00': '2018-04-15 14:36:00'}, {'201A0415 14:36:25=None': None}, {'201A0415 14:36=None': None}, {'201A0415=None': None} ], "iso2date aufrufen" ) # versuchen eine vorhandene Funktion ohne rpc Kennung aufzurufen response = self.app.get( "api/dummy/norpc" ) self.assertEqual(response.status_code, 400, "Status nicht 400") self.assertEqual( response.json, {}, "versuchen eine vorhandene Funktion ohne rpc Kennung aufzurufen" ) #print( response.json ) def test_webapp_db_tests_A( self ): ''' Api aufruf durchführen GET /tests/ ''' # zuerst den zugriff testen und prüfen ob die tabelle 5 datensätze hat # response = self.app.get( 
"api/dbtests/", query_string={}) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len(response.json["data"]), 8, "keine 8 Datensätze" ) return # # einen Datensatz zusätzlich einfügen # response = self.app.post( "api/dbtests/", headers={'Content-Type': 'application/json'}, data=json.dumps({ "data" : { "attributes": { "string":"sechs", # Pflichtfeld #"date":"2020-08-19", "integer":6 }, "type":"dbtests" } }), follow_redirects=True) self.assertEqual(response.status_code, 201, "Api Status nicht 201 (Created)") self.assertEqual( response.json["data"]["id"], '6', "Datensatz id ist nicht 6") # record merken newRecord6 = response.json["data"]["attributes"] id6 = response.json["data"]["id"] link6 = response.json["data"]["links"]["self"] # # einen zweiten einfügen # response = self.app.post( "api/dbtests/", headers={'Content-Type': 'application/json'}, data=json.dumps({ "data" : { "attributes": { "string":"sieben", # Pflichtfeld #"date":"2020-08-19", "integer":7 }, "type":"dbtests" } }), follow_redirects=True) self.assertEqual(response.status_code, 201, "Api Status nicht 201 (Created)") self.assertEqual( response.json["data"]["id"], '7', "Datensatz id ist nicht 7") # record merken newRecord7 = response.json["data"]["attributes"] id7 = response.json["data"]["id"] link7 = response.json["data"]["links"]["self"] # # jetzt alle holen und prüfen # response = self.app.get( "api/dbtests/") self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len(response.json["data"]), 7, "Datensatzanzahl ist nicht 7") id = response.json["data"][5]["id"] # zählung ab 0 (5 ist record 6) record = response.json["data"][5]["attributes"] link = response.json["data"][5]["links"]["self"] self.assertEqual( id, id6, "Datensatz id=6 vom ersten stimmt nicht") self.assertEqual( record, newRecord6, "Datensatz Inhalt vom ersten stimmt nicht") # # den siebten Datensatz über den angegebenen link holen # response = self.app.get( link7 ) self.assertEqual( response.json["data"]["id"], '7', "Datensatz Id Rückgabe ist nicht 7") self.assertEqual( type(response.json["data"]), dict, "Datensatz data ist kein dict") # Inhalt vergleichen self.assertEqual( response.json["data"]["attributes"], newRecord7, "Datensatz Inhalt stimmt nicht") # # siebten Datensatz ändern - die id muss in body und path angegeben werden # response = self.app.patch( link7, headers={'Content-Type': 'application/json'}, data=json.dumps({ "data" : { "attributes": { # "date":"2020-08-19 00:00", # 2020-08-20, 00:00 "string":"changed", }, "id": '7', "type":"dbtests" } }), follow_redirects=True) # 200 - Request fulfilled, document follows self.assertEqual(response.status_code, 200, "Api Status nicht 200") # Inhalt darf nicht mehr gleich sein self.assertNotEqual( response.json["data"], newRecord7, "Datensatz Inhalt ist noch gleich") # # den zweiten Datensatz über den angegebenen link holen und Änderungen prüfen # response = self.app.get( link7 ) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"]["attributes"]["string"], "changed", "Feldinhalt ist nicht changed") # alle holen response = self.app.get( "api/dbtests/") self.assertEqual(response.status_code, 200, "Api Status nicht 200") lastCount = len(response.json["data"] ) # Datensatz 6 und 7 löschen response = self.app.delete( link6, headers={'Content-Type': 'application/json'} ) self.assertEqual(response.status_code, 204, "Api Status nicht 204") # alle verbleibenden holen und Anzahl prüfen response = 
self.app.get( "api/dbtests/") self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual(len(response.json["data"] ), lastCount - 1 , "Api Status nicht {}".format( lastCount - 1 )) # jetzt noch 7 löschen response = self.app.delete( link7, headers={'Content-Type': 'application/json'} ) self.assertEqual(response.status_code, 204, "Api Status nicht 204") # nach dem löschen Anzahl prüfen response = self.app.get( "api/dbtests/", query_string={}) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len(response.json["data"]), 5, "keine 5 Datensätze nach dem löschen von 6 und 7" ) # fehler bei falschem patch response = self.app.patch( link7, headers={'Content-Type': 'application/json'}, data=json.dumps({ "data" : { "attributes": { "string_gibtsnicht":"changed", }, "id": '99', "type":"dbtests" } }), follow_redirects=True) self.assertEqual(response.status_code, 500, "Api Status nicht 500") self.assertEqual( response.json["App-Error"], [{'message': 'patch - unbekannter Fehler', 'info': '500'}], "fehler bei falschem patch" ) def test_webapp_db_tests_rqlFilter( self ): ''' Api aufruf durchführen GET /tests/ ''' # einen undefined holen response = self.app.get( "api/dbtests/undefined") self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"], {'attributes': { 'active': None, 'data': None, 'date': None, 'decimal': None, 'float': None, 'gruppe': None, 'integer': None, 'isodate': None, 'isodatetime': None, 'numeric': None, 'string': None, 'tags': None }, 'id': 'undefined', 'type': 'dbtests'}, "einen undefined holen" ) # keine fehler response = self.app.get( "api/dbtests/", query_string={ "art" : "rqlFilter", "filter" : "eq(id,1)" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len(response.json["data"]), 1, "eq(id,1) hat keine Daten") # leere rückgabe bei nicht vorhandener value response = self.app.get( "api/dbtests/", query_string={ "art" : "rqlFilter", "filter" : "eq(id,appDialog)" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual(response.json["data"], [], "eq(id,appDialog) hat Daten") self.assertEqual( response.json["infos"]["rql"], [ {'title': 'filter', 'detail': 'eq(id,appDialog)', 'code': None}, {'title': '_rql_where_clause', 'detail': { 'where': 'dbtests.id = :id_1', 'params': {'id_1': 'appDialog'} }, 'code': None } ], "eq(id,appDialog)" ) response = self.app.get( "api/dbtests/", query_string={ "art" : "rqlFilter", "filter" : "and(eq(active,true),lt(float,numeric))" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") # Fehler bei falscher Filterangabe response = self.app.get( "api/dbtests/", query_string={ "art" : "rqlFilter", "filter" : "eq(id=1)" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual(response.json["data"], [], "eq(tid=1) hat Daten") self.assertEqual(response.json["errors"],[{ 'title': '_int_rqlfilter', 'detail': 'rql-error: RQL Syntax error: (\'eq(id=1)\', 5, \'Expected ")"\')', 'code': None } ], "Fehler bei falscher Filterangabe - eq(id=1)") response = self.app.get( "api/dbtests/", query_string={ "art" : "rqlFilter", "filter" : "eq(tid=1)" }) #print("AppInfo203", response.json) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual(response.json["data"], [], "eq(tid=1) hat Daten") self.assertEqual(response.json["errors"],[{ 'title': '_int_rqlfilter', 'detail': 'rql-error: RQL Syntax error: (\'eq(tid=1)\', 6, 
        self.assertEqual(response.json["errors"], [{
            'title': '_int_rqlfilter',
            'detail': 'rql-error: RQL Syntax error: (\'eq(tid=1)\', 6, \'Expected ")"\')',
            'code': None
        } ], "error on an invalid filter expression - eq(tid=1)")

        # fetch a non-existent record
        # FIXME: suppress the console message - catch it earlier in method_wrapper?
        response = self.app.get( "api/dbtests/100")
        self.assertEqual(response.status_code, 404, "API status not 404 - notFound")

    def test_webapp_db_filter( self ):
        ''' Perform the API call GET /dbtests?filter=
        '''
        response = self.app.get( "api/dbtests/", query_string={ "art" : "filter field", "filter" : '[{"name":"string","op":"eq","val":"one"}]' })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( len(response.json["data"]), 1, "filter field count does not match")

        response = self.app.get( "api/dbtests/", query_string={ "art" : "filter query", "filter[string]" : "one" })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( len(response.json["data"]), 1, "filter query count does not match")

        response = self.app.get( "api/dbtests/", query_string={ "art" : "filter rql", "filter" : "eq(string,one)" })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( len(response.json["data"]), 1, "filter rql count does not match")

        response = self.app.get( "api/dbtests/", query_string={ "art" : "filter search", "filter" : "*one" })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( len(response.json["data"]), 2, "filter search count does not match")

        response = self.app.get( "api/dbtests/", query_string={ "art" : "filter mixed", "filter" : '[{"name":"active","op":"eq","val":true}]|*one' })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( len(response.json["data"]), 2, "filter mixed count does not match")

    def test_webapp_db_tests_C( self ):
        # call the test function in dbtests
        response = self.app.get( "api/dbtests/test", query_string={ "art" : 'AppInfo203' })
        self.assertEqual(response.status_code, 203, "API status not 203")
        self.assertEqual(
            response.json["infos"]["general"],
            [{'title': 'Test AppInfo', 'detail': 'App-Info mit code 203', 'code': 203}],
            "AppInfo203 failed"
        )

        response = self.app.get( "api/dbtests/test", query_string={
            # without the required field art
        })
        self.assertEqual(response.status_code, 400, "API status not 400")
        self.assertEqual(
            response.json["message"],
            {'art': 'bestimmt die Art des Tests'},
            "missing message for a missing required field"
        )

        response = self.app.get( "api/dbtests/test", query_string={ "art" : 'query' })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual(
            response.json['infos']['query'],
            [{
                'title': 'sql-lastquery',
                'detail': 'query is None',
                'code': None
            }, {
                'title': 'sql-lastquery',
                'detail': { 'query': 'SELECT dbtests.string \nFROM dbtests', 'params': {}},
                'code': None
            }],
            "last query info does not match"
        )

        response = self.app.get( "api/dbtests/test", query_string={ "art" : 'AppDialog' })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( response.json["errors"], [], "there should be no error message" )
        self.assertEqual(
            response.json["infos"]["dialog"],
            [{
                'title': 'Test AppDialog',
                'detail': {
                    'content': 'Einfach nur ein Dialog',
                    'dimensions': [200, 200],
                    'title': 'Test AppDialog'
                },
                'code': None
            }],
            "dialog message is wrong"
        )

        response = self.app.get( "api/dbtests/test", query_string={ "art" : 'AppDialog403' })
        self.assertEqual(response.status_code, 403, "API status not 403")
        self.assertEqual(
response.json["infos"]["dialog"], [{ 'title': 'Test AppDialog', 'detail': { 'content': 'AppDialog mit AppError und code 403', 'dimensions': [200, 200], 'title': 'Test AppDialog' }, 'code': 403 }], "Fehler bei der Dialog Meldung" ) response = self.app.get( "api/dbtests/test", query_string={ "art" : 'AppError' }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["errors"], [{ 'title': 'Test AppError', 'detail': 'App-Error ohne code', 'code': None }], "error Meldung ist falsch" ) response = self.app.get( "api/dbtests/test", query_string={ "art" : 'AppError403' }) self.assertEqual(response.status_code, 403, "Api Status nicht 403") self.assertEqual( response.json["errors"], [{ 'title': 'Test AppError', 'detail': 'App-Error mit code 403', 'code': 403 }], "error Meldung ist falsch" ) response = self.app.get( "api/dbtests/test", query_string={ "art" : 'AppInfo' }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["infos"]["general"], [{ 'title': 'Test AppInfo', 'detail': 'App-Info ohne code', 'code': None }], "error Meldung ist falsch" ) response = self.app.get( "api/dbtests/test", query_string={ "art" : 'AppInfo203' }) self.assertEqual(response.status_code, 203, "Api Status nicht 203") self.assertEqual( response.json["infos"]["general"], [{ 'title': 'Test AppInfo', 'detail': 'App-Info mit code 203', 'code': 203 }], "error Meldung ist falsch" ) def test_webapp_db_relation( self ): ''' Api aufruf für relative Tabellen api/dbtestsrel?filter=eq(dbtests_id,2) [{'attributes': {'dbtests_id': 2, 'rdata': None, 'rdate': None, 'rgroup': 'B', 'rinteger': 12, 'rstring': 'r_zwei'}, 'id': '2', 'links': {'self': 'http://localhost/api/dbtestsrel/2/'}, 'relationships': {'dbtests': {'data': None, 'links': {'self': 'http://localhost/api/dbtestsrel/2/dbtests'}}}, 'type': 'dbtestsrel'}] ''' # zuerst den zugriff testen und prüfen ob die tabelle leer ist # response = self.app.get( "api/dbtests/") self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len( response.json["data"] ), 8, "keine 8 Datensätze" ) response = self.app.get( "api/dbtestsrel/") self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( len(response.json["data"]), 5, "keine 5 Datensätze" ) # daten über path und filter müssen gleich sein nur die globale links Angabe unterscheidet sich # http://127.0.0.1:5000/api/nutzung?_ispcp={%22_default%22:{%22ersatz_id%22:1754}}&filter=eq(ersatz_id,1754)&page[offset]=0&page[limit]=25 response = self.app.get( "api/dbtests/2/dbtestsrel") self.assertEqual(response.status_code, 200, "Api Status nicht 200") reldata = response.json response = self.app.get( "api/dbtestsrel", query_string={ "filter":"eq(dbtests_id,2)" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( reldata["data"], response.json["data"], "Rückgaben sind nicht gleich" ) def test_webapp_db_groupby( self ): ''' Api aufruf für relative Tabellen # ohne group Angabe wird fields verwendet /api/<modul>/groupby?fields[<modul>]=<feld1> # mit group /api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups=<feld1,feld2> # mit group und delimiter /api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&groups[<modul>]=<feld1,feld2>&delimiter=, # mit Filter /api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&filter=eq(aktiv,true) # mit labels /api/<modul>/groupby?fields[<modul>]=<feld1,feld2>&labels={"dbtests.gruppe": "Hallo"} ''' # mit fields Angabe response = self.app.get( 
"api/dbtests/groupby", query_string={ "fields[dbtests]":"gruppe" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"],[ {'attributes': {'hasChildren': 2, 'gruppe': 'A'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 2, 'gruppe': 'B'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 3, 'gruppe': 'C'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 1, 'gruppe': 'D'}, 'id': None, 'type': 'dbtests'} ], "groupby mit fields Angabe Rückgabe fehlerhaft " ) # mit groups Angabe response = self.app.get( "api/dbtests/groupby", query_string={ "groups":"gruppe" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"],[ {'attributes': {'hasChildren': 2, 'gruppe': 'A'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 2, 'gruppe': 'B'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 3, 'gruppe': 'C'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 1, 'gruppe': 'D'}, 'id': None, 'type': 'dbtests'} ], "groupby mit groups Angabe Rückgabe fehlerhaft " ) # mit groups Angabe und filter response = self.app.get( "api/dbtests/groupby", query_string={ "groups":"gruppe", "filter":"eq(active,true)" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"],[ {'attributes': {'hasChildren': 2, 'gruppe': 'A'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 1, 'gruppe': 'B'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 2, 'gruppe': 'C'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 1, 'gruppe': 'D'}, 'id': None, 'type': 'dbtests'} ], "groupby mit groups Angabe Rückgabe fehlerhaft " ) # mit Filter und zwei Gruppierungs Feldern response = self.app.get( "api/dbtests/groupby", query_string={ "groups[dbtests]":"gruppe,tags", "filter":"eq(active,true)" }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"],[ {'attributes': {'gruppe': 'A', 'hasChildren': 1, 'tags': 'A,K'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'gruppe': 'A', 'hasChildren': 1, 'tags': 'B K A'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'gruppe': 'B', 'hasChildren': 1, 'tags': 'A,K'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'gruppe': 'C', 'hasChildren': 1, 'tags': None}, 'id': None, 'type': 'dbtests'}, {'attributes': {'gruppe': 'C', 'hasChildren': 1, 'tags': 'M,K,one'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'gruppe': 'D', 'hasChildren': 1, 'tags': None}, 'id': None, 'type': 'dbtests'} ], "groupby mit Filter und zwei Gruppierungs Feldern fehlerhaft " ) # groupby mit label testen response = self.app.get( "api/dbtests/groupby", query_string={ "groups":"gruppe", "labels": '{"dbtests.gruppe": "lGruppe"}' }) self.assertEqual(response.status_code, 200, "Api Status nicht 200") self.assertEqual( response.json["data"], [ {'attributes': {'hasChildren': 2, 'lGruppe': 'A'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 2, 'lGruppe': 'B'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 3, 'lGruppe': 'C'}, 'id': None, 'type': 'dbtests'}, {'attributes': {'hasChildren': 1, 'lGruppe': 'D'}, 'id': None, 'type': 'dbtests'} ], "groupby mit label fehlerhaft " ) # groupby mit zweifachen label testen response = self.app.get( "api/dbtests/groupby", query_string={ "groups":"gruppe", "labels": '{"dbtests.gruppe": ["lGruppeA", "lGruppeB"]}' }) 
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( response.json["data"], [
            {'attributes': {'hasChildren': 2, 'lGruppeA': 'A', 'lGruppeB': 'A'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 2, 'lGruppeA': 'B', 'lGruppeB': 'B'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 3, 'lGruppeA': 'C', 'lGruppeB': 'C'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'lGruppeA': 'D', 'lGruppeB': 'D'}, 'id': None, 'type': 'dbtests'}
        ], "groupby with twofold label: result incorrect" )

        # test groupby with fields and a label
        response = self.app.get( "api/dbtests/groupby", query_string={ "fields[dbtests]":"gruppe", "labels": '{"dbtests.gruppe": "lGruppe"}' })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual(response.json["data"], [
            {'attributes': {'hasChildren': 2, 'lGruppe': 'A'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 2, 'lGruppe': 'B'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 3, 'lGruppe': 'C'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'lGruppe': 'D'}, 'id': None, 'type': 'dbtests'}
        ], "groupby with fields and label: result incorrect" )

        # test groupby with fields and a twofold label
        response = self.app.get( "api/dbtests/groupby", query_string={ "fields[dbtests]":"gruppe", "labels": '{"dbtests.gruppe": ["lGruppeA", "lGruppeB"]}' })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( response.json["data"], [
            {'attributes': {'hasChildren': 2, 'lGruppeA': 'A', 'lGruppeB': 'A'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 2, 'lGruppeA': 'B', 'lGruppeB': 'B'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 3, 'lGruppeA': 'C', 'lGruppeB': 'C'}, 'id': None, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'lGruppeA': 'D', 'lGruppeB': 'D'}, 'id': None, 'type': 'dbtests'}
        ], "groupby with fields and label: result incorrect" )

        # id as group is filtered out
        response = self.app.get( "api/dbtests/groupby", query_string={ "groups":"id" })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( response.json["data"], [
            {'attributes': {'hasChildren': 1}, 'id': 1, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1}, 'id': 2, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1}, 'id': 3, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1}, 'id': 4, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1}, 'id': 5, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1}, 'id': 6, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1}, 'id': 7, 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1}, 'id': 8, 'type': 'dbtests'}
        ], "id as group is filtered out" )

    def test_webapp_db_groupsplit( self ):
        ''' Perform API calls for grouped field data

        /api/<modul>/groupsplit?group=<feld1>
        /api/<modul>/groupsplit?group=<feld1>&delimiter=,
        /api/<modul>/groupsplit?group=<feld1>&delimiter=,&filter=eq(active,1)

        WITH split(word, str) AS (
            SELECT '', tags||',' FROM dbtests WHERE active=1
            UNION ALL
            SELECT substr(str, 0, instr(str, ',')), substr(str, instr(str, ',')+1)
            FROM split WHERE str!=''
        )
        SELECT word as tags, count(*) AS hasChildren FROM split WHERE word!='' GROUP BY word
        '''
        # groupsplit with the default delimiter (space)
        response = self.app.get( "api/dbtests/groupsplit", query_string={
            "group": "tags",
        })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( response.json["data"], [
            {'attributes': {'hasChildren': 1, 'id': 'A', 'tags': 'A'}, 'id': 'A', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 2, 'id': 'A,K', 'tags': 'A,K'}, 'id': 'A,K', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'B', 'tags': 'B'}, 'id': 'B', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'B,M', 'tags': 'B,M'}, 'id': 'B,M', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 2, 'id': 'K', 'tags': 'K'}, 'id': 'K', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'L,A', 'tags': 'L,A'}, 'id': 'L,A', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'M,K,one', 'tags': 'M,K,one'}, 'id': 'M,K,one', 'type': 'dbtests'}
        ], "groupsplit with default (space) delimiter: result incorrect" )

        # groupsplit with a delimiter
        response = self.app.get( "api/dbtests/groupsplit", query_string={
            "group": "tags",
            "delimiter": ","
        })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( response.json["data"], [
            {'attributes': {'hasChildren': 2, 'id': 'A', 'tags': 'A'}, 'id': 'A', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'A K', 'tags': 'A K'}, 'id': 'A K', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'B', 'tags': 'B'}, 'id': 'B', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'B K A', 'tags': 'B K A'}, 'id': 'B K A', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 3, 'id': 'K', 'tags': 'K'}, 'id': 'K', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'L', 'tags': 'L'}, 'id': 'L', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 2, 'id': 'M', 'tags': 'M'}, 'id': 'M', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'one', 'tags': 'one'}, 'id': 'one', 'type': 'dbtests'}
        ], "groupsplit with ',' delimiter: result incorrect" )

        # groupsplit with delimiter and filter
        response = self.app.get( "api/dbtests/groupsplit", query_string={
            "group": "tags",
            "filter": "eq(active,1)",
            "delimiter": ","
        })
        self.assertEqual(response.status_code, 200, "API status not 200")
        self.assertEqual( response.json["data"], [
            {'attributes': {'hasChildren': 2, 'id': 'A', 'tags': 'A'}, 'id': 'A', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'B K A', 'tags': 'B K A'}, 'id': 'B K A', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 3, 'id': 'K', 'tags': 'K'}, 'id': 'K', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'M', 'tags': 'M'}, 'id': 'M', 'type': 'dbtests'},
            {'attributes': {'hasChildren': 1, 'id': 'one', 'tags': 'one'}, 'id': 'one', 'type': 'dbtests'}
        ], "groupsplit with ',' delimiter and filter 'eq(active,1)': result incorrect" )

    def todo_test_webapp_db_typen( self ):
        ''' Test various field types
        '''
        # .. todo:: numeric fields
        # date fields - date
        # json fields - data
        response = self.app.post( "api/dbtests/", headers={'Content-Type': 'application/json'}, data=json.dumps({
            "data" : {
                "attributes": {
                    "string": "sechs", # required field
                    "date": "2020-08-19",
                    "integer": 6,
                    "data": {"A": 1},
                    "float": 1/3,
                    "decimal" : 1.2345, # should yield only 1.23
                    "numeric" : 5.6789,
                    "isodate" : "2020-08-19",
                    "isodatetime" : "2020-08-19 14:37"
                },
                "type": "dbtests"
            }
        }), follow_redirects=True)
        print( "TODO: test_webapp_db_typen", response.json["data"] )
        #self.assertEqual( response.status_code, 201, "Api Status nicht 201 (Created)")
        #self.assertEqual( response.json["data"]["attributes"]["date"], '2020-08-19', "Datensatz datum ist nicht 2020-08-19")
        #self.assertEqual( response.json["data"]["attributes"]["data"], {"A":1}, 'Datensatz data ist nicht {"A":1}')
        #self.assertEqual( response.json["data"]["attributes"]["float"], 0.3333333333333333, 'Datensatz float ist nicht 0.3333333333333333')
        #print( response.json["data"] )
        pass

    def test_isp_mpdf_fonts( self ):
        """Tests the fonts needed for PDF creation using fc-list

        Required fonts:

        * DejaVuSerif
        * Material Design Icons

        Returns
        -------
        None.

        """
        import subprocess
        cmd = '/usr/bin/fc-list --format="%{family[0]}\n" | sort | uniq'
        output = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE ).communicate()[0]
        self.assertIn( b"Material Design Icons", output, "The font 'Material Design Icons' is missing from the system" )
        self.assertIn( b"DejaVu Serif", output, "The font 'DejaVuSerif' is missing from the system" )

    def test_isp_mpdf_base( self ):
        ''' Create a PDF document
        '''
        response = self.app.get( "api/dummy/pdf" )
        self.assertEqual(response.status_code, 400, "status not 400")
        self.assertEqual( response.data, b"Keine PDF Datei (nofile.pdf) gefunden", "tests the error for a missing PDF file" )

        config = ispConfig( )
        v1 = config.variables.get("Version")

        # information about the PDF creation
        response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-info" } )
        self.assertEqual(response.status_code, 200, "status not 200")
        self.assertEqual(
            response.json["data"]["varianten"],
            {'v1': v1, 'v2': 'u.0.1', 'v3': 'u.0.1'},
            "resources specification does not match"
        )

        # an empty PDF with overlay
        response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-1" } )
        self.assertEqual(response.status_code, 200, "status not 200")
        self.assertEqual( response.json["data"]["body"], "", "PDF body is not empty" )
        self.check_pdf_data( response.json["data"], contents=0, pages=1, intern_check=True )

        # text and markdown with a header (h2)
        response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-2" } )
        self.assertEqual(response.status_code, 200, "status not 200")
        # an error here means the weasyprint css integration is wrong
        self.check_pdf_data( response.json["data"], contents=1, pages=1, intern_check=True )

        # like test 2 but markdown first
        response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-2a" } )
        self.assertEqual(response.status_code, 200, "status not 200")
        #print( response.json["data"] )
        self.check_pdf_data( response.json["data"], contents=1, pages=1, intern_check=True )

        response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-3" } )
        self.assertEqual(response.status_code, 200, "status not 200")
        self.check_pdf_data( response.json["data"], contents=2, pages=4, intern_check=True )

        response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-4" } )
        self.assertEqual(response.status_code, 200, "status not 200")
        # an error here means the weasyprint font integration is wrong
        self.check_pdf_data( response.json["data"], contents=2, pages=3, intern_check=True )

        # insert content via a template file
        response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-5" } )
        self.assertEqual(response.status_code, 200, "status not 200")
        # check manually, since debug information is present
        self.assertEqual( response.json["data"]["pages"], 2, "number of pages does not match" )
        #print( response.json["data"] )
        #print( response.json )

        # .. todo:: return the result as pdf


def suite( testClass=None ):
    '''Adds all functions starting with test_ from the given class to the suite

    Parameters
    ----------
    testClass : unittest.TestCase
        class to test

    Returns
    -------
    suite : unittest.TestSuite

    '''
    if not testClass:
        testClass = ispTest

    suite = unittest.TestSuite( )

    logger.setLevel( logging.ERROR ) # ERROR DEBUG WARNING

    if testClass:
        #suite.addTest( testClass('test_webapp_db_tests_filter') )
        #suite.addTest( testClass('test_webapp_db_groupby') )
        #suite.addTest( testClass('test_webapp_db_groupsplit') )
        # return suite

        for m in dir( testClass ):
            if m.startswith('test_config_'):
                suite.addTest( testClass(m) )
            elif m.startswith('test_webapp_base_'):
                suite.addTest( testClass(m) )
            elif m.startswith('test_webapp_dummy_'):
                suite.addTest( testClass(m) )
            elif m.startswith('test_webapp_db_'):
                suite.addTest( testClass(m) )
            elif m.startswith('test_isp_mpdf_'):
                suite.addTest( testClass(m) )

    return suite


# -----------------------------------------------------------------------------
if __name__ == '__main__':
    '''
    0 (quiet): you just get the total numbers of tests executed and the global result
    1 (default): you get the same plus a dot for every successful test or a F for every failure
    2 (verbose): you get the help string of every test and the result
    '''
    runner = unittest.TextTestRunner()
    runner.run( suite( ispTest ) )
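# A minimal sketch of running a single test method instead of the whole suite,
# e.g. while debugging one endpoint (assumes this module is executed directly
# and ispTest is the TestCase defined above):
#
#   runner = unittest.TextTestRunner(verbosity=2)
#   runner.run(unittest.TestSuite([ispTest('test_webapp_db_groupby')]))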
main.py
import argparse import socket import subprocess import threading from sys import platform import getmac parser = argparse.ArgumentParser(description="Network scanner") parser.add_argument("-a",'--all',help="Scan from 10.10.0.1 to 10.10.255.1",action="store_true") parser.add_argument("-s",'--subnetwork',help="Specify a range or a number or even several numbers, eg: 1-25 or 1 or 1,50,24 -> scan 10.10.0.1 to 10.10.5.1") parser.add_argument("-pp","--prettyprint",help="Makes output prettier",action="store_true") parser.add_argument("-p","--ping",help="Choose how many times you want to ping a host, default 2",choices=['1','2','3','4','5']) parser.add_argument("hostnumber",help="The range of ip that will be scanned (per subnetwork), for eg: 1-25 -> 10.10.1.1 to 10.10.1.25",type=str) args = parser.parse_args() vendors = {'002272': 'American Micro-Fuel Device Corp.', '00D0EF': 'IGT', '086195': 'Rockwell Automation', 'F4BD9E': 'Cisco Systems, Inc', '5885E9': 'Realme Chongqing MobileTelecommunications Corp Ltd', 'BC2392': 'BYD Precision Manufacture Company Ltd.', '94E6F7': 'Intel Corporate', '405582': 'Nokia', 'A4E31B': 'Nokia', 'D89790': 'Commonwealth Scientific and Industrial Research Organisation', '883A30': 'Aruba, a Hewlett Packard Enterprise Company', 'B8A58D': 'Axe Group Holdings Limited', '50CEE3': 'Gigafirm.co.LTD', '98E743': 'Dell Inc.', 'C419D1': 'Telink Semiconductor (Shanghai) Co., Ltd.', '4C1D96': 'Intel Corporate', '887E25': 'Extreme Networks, Inc.', '086083': 'zte corporation', 'E01954': 'zte corporation', '10327E': 'Huawei Device Co., Ltd.', 'F8084F': 'Sagemcom Broadband SAS', '30FBB8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F497C2': 'Nebulon Inc', 'A44519': 'Xiaomi Communications Co Ltd', '68DBF5': 'Amazon Technologies Inc.', '2446C8': 'Motorola Mobility LLC, a Lenovo Company', '1802AE': 'vivo Mobile Communication Co., Ltd.', '0C20D3': 'vivo Mobile Communication Co., Ltd.', '44D791': 'HUAWEI TECHNOLOGIES CO.,LTD', '8446FE': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D82918': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D0D003': 'Samsung Electronics Co.,LTD', '64B21D': 'Chengdu Phycom Tech Co., Ltd.', 'C42996': 'Signify B.V.', '980637': 'IEEE Registration Authority', '8CB84A': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '98E8FA': 'Nintendo Co.,Ltd', '38C4E8': 'NSS Sp. z o.o.', '34DD7E': 'Umeox Innovations Co.,Ltd', 'CCCD64': 'SM-Electronic GmbH', '24DFA7': 'Hangzhou BroadLink Technology Co.,Ltd', 'B065F1': 'WIO Manufacturing HK Limited', '901234': 'Shenzhen YOUHUA Technology Co., Ltd', '542A1B': 'Sonos, Inc.', '5C925E': 'Zioncom Electronics (Shenzhen) Ltd.', '084FA9': 'Cisco Systems, Inc', '084FF9': 'Cisco Systems, Inc', '5098B8': 'New H3C Technologies Co., Ltd', '1100AA': 'Private', 'B84DEE': 'Hisense broadband multimedia technology Co.,Ltd', 'A89352': 'SHANGHAI ZHONGMI COMMUNICATION TECHNOLOGY CO.,LTD', 'E4CC9D': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', 'A8D0E3': 'Systech Electronics Ltd', '98BA39': 'Doro AB', 'D46BA6': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CC0577': 'HUAWEI TECHNOLOGIES CO.,LTD', '308BB2': 'Cisco Systems, Inc', 'E0EB62': 'Shanghai Hulu Devices Co., Ltd', '08688D': 'New H3C Technologies Co., Ltd', 'E86F38': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '48216C': 'China Mobile IOT Company Limited', '8CBE24': 'Tashang Semiconductor(Shanghai) Co., Ltd.', '08B3AF': 'vivo Mobile Communication Co., Ltd.', '30862D': 'Arista Network, Inc.', '6CE8C6': 'Earda Technologies co Ltd', '1C4176': 'China Mobile Group Device Co.,Ltd.', '608B0E': 'Apple, Inc.', '1871D5': 'Hazens Automotive Electronics(SZ)Co.,Ltd.', 'ACB1EE': 'SHENZHEN FENDA TECHNOLOGY CO., LTD', 'F8ADCB': 'HMD Global Oy', 'D462EA': 'HUAWEI TECHNOLOGIES CO.,LTD', '54BAD6': 'HUAWEI TECHNOLOGIES CO.,LTD', '94DC4E': 'AEV, spol. s r. o.', '1442FC': 'Texas Instruments', 'AC5D5C': 'FN-LINK TECHNOLOGY LIMITED', 'A4AE11': 'Hon Hai Precision Ind. Co., Ltd.', '54DED0': 'Sevio Srl', '6C5E3B': 'Cisco Systems, Inc', '401920': 'Movon Corporation', 'D03745': 'TP-LINK TECHNOLOGIES CO.,LTD.', '603A7C': 'TP-LINK TECHNOLOGIES CO.,LTD.', '000178': 'MARGI Systems, Inc.', '0CB771': 'ARRIS Group, Inc.', '58C876': 'China Mobile (Hangzhou) Information Technology Co., Ltd.', '2C1E4F': 'Chengdu Qianli Network Technology Co., Ltd.', '009052': 'SELCOM ELETTRONICA S.R.L.', '001A83': 'Pegasus Technologies Inc.', '50E085': 'Intel Corporate', '24166D': 'HUAWEI TECHNOLOGIES CO.,LTD', '940B19': 'HUAWEI TECHNOLOGIES CO.,LTD', '70C7F2': 'HUAWEI TECHNOLOGIES CO.,LTD', '3894ED': 'NETGEAR', '700433': 'California Things Inc.', 'DCA632': 'Raspberry Pi Trading Ltd', '88F56E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BC97E1': 'Broadcom Limited', '28D1B7': 'Shenzhen YOUHUA Technology Co., Ltd', 'C8C2FA': 'HUAWEI TECHNOLOGIES CO.,LTD', '88B362': 'Nokia Shanghai Bell Co., Ltd.', '0847D0': 'Nokia Shanghai Bell Co., Ltd.', '089C86': 'Nokia Shanghai Bell Co., Ltd.', '7C8956': 'Samsung Electronics Co.,Ltd', '9C93E4': 'Private', '88299C': 'Samsung Electronics Co.,Ltd', 'CC9093': 'Hansong Tehnologies', 'CC64A6': 'HUAWEI TECHNOLOGIES CO.,LTD', '30317D': 'Hosiden Corporation', 'F0A968': 'Antailiye Technology Co.,Ltd', '48E1E9': 'Chengdu Meross Technology Co., Ltd.', 'D81399': 'Hui Zhou Gaoshengda Technology Co.,LTD', '54E019': 'Ring LLC', '148430': 'MITAC COMPUTING TECHNOLOGY CORPORATION', 'B8A44F': 'Axis Communications AB', '0024EB': 'ClearPath Networks, Inc.', '50AF4D': 'zte corporation', 'C8EAF8': 'zte corporation', '709F2D': 'zte corporation', '383B26': 'Jiangsu Qinheng Co., Ltd.', '5CFAFB': 'Acubit', '9C7BEF': 'Hewlett Packard', '742EDB': 'Perinet GmbH', '201742': 'LG Electronics', 'CC8826': 'LG Innotek', 'EC5B73': 'Advanced & Wise Technology Corp.', 'E0CB1D': 'Private', '848BCD': 'IEEE Registration Authority', '14C03E': 'ARRIS Group, Inc.', 'C089AB': 'ARRIS Group, Inc.', 'D44DA4': 'Murata Manufacturing Co., Ltd.', 'DC7196': 'Intel Corporate', 'F8E5CF': 'CGI IT UK LIMITED', '6882F2': 'grandcentrix GmbH', 'D420B0': 'Mist Systems, Inc.', '08EDED': 'Zhejiang Dahua Technology Co., Ltd.', '6092F5': 'ARRIS Group, Inc.', '0022AF': 'Safety Vision, LLC', 'A091A2': 'OnePlus Electronics (Shenzhen) Co., Ltd.', '0080B5': 'UNITED NETWORKS INC.', 'B808CF': 'Intel Corporate', '1C697A': 'EliteGroup Computer Systems Co., LTD', '4C1744': 'Amazon Technologies Inc.', 'B03055': 'China Mobile IOT Company Limited', '905C34': 'Sirius Electronic Systems Srl', 'D46A35': 'Cisco Systems, Inc', 'D09C7A': 'Xiaomi Communications Co Ltd', 'C82C2B': 'IEEE Registration 
Authority', '8020DA': 'Sagemcom Broadband SAS', '68847E': 'FUJITSU LIMITED', '003085': 'Cisco Systems, Inc', '605F8D': 'eero inc.', 'C4B36A': 'Cisco Systems, Inc', '70F754': 'AMPAK Technology,Inc.', '6C8BD3': 'Cisco Systems, Inc', '68974B': 'Shenzhen Costar Electronics Co. Ltd.', '34E1D1': 'IEEE Registration Authority', '0021B7': 'LEXMARK INTERNATIONAL, INC.', '00A0B0': 'I-O DATA DEVICE,INC.', '2479F3': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '80A235': 'Edgecore Networks Corporation', 'C8C64A': 'Flextronics Tech.(Ind) Pvt Ltd', '30EA26': 'Sycada BV', '9C497F': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'C4E39F': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'F89A78': 'HUAWEI TECHNOLOGIES CO.,LTD', '88F872': 'HUAWEI TECHNOLOGIES CO.,LTD', 'EC5623': 'HUAWEI TECHNOLOGIES CO.,LTD', '5486BC': 'Cisco Systems, Inc', '402343': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '18F18E': 'ChipER Technology co. ltd', '000422': 'Studio Technologies, Inc', '80DA13': 'eero inc.', '50EC50': 'Beijing Xiaomi Mobile Software Co., Ltd', '6061DF': 'Z-meta Research LLC', '7057BF': 'New H3C Technologies Co., Ltd', '8CE748': 'Private', '108286': 'Luxshare Precision Industry Co.,Ltd', '14B457': 'Silicon Laboratories', 'DC962C': 'NST Audio Ltd', '18022D': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D8BC59': 'Shenzhen DAPU Microelectronics Co., Ltd', '089798': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '246F28': 'Espressif Inc.', '8C79F5': 'Samsung Electronics Co.,Ltd', '48F8DB': 'HUAWEI TECHNOLOGIES CO.,LTD', '00122A': 'VTech Telecommunications Ltd.', 'B0518E': 'Holl technology CO.Ltd.', '681729': 'Intel Corporate', '2852E0': 'Layon international Electronic & Telecom Co.,Ltd', '58CB52': 'Google, Inc.', '7C6166': 'Amazon Technologies Inc.', '989BCB': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '94F7AD': 'Juniper Networks', '6063F9': 'Ciholas, Inc.', 'AC8FF8': 'Nokia', '6003A6': 'Inteno Broadband Technology AB', '7C5259': 'Sichuan Jiuzhou Electronic Technology Co., Ltd.', '44B295': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', '9424E1': 'Alcatel-Lucent Enterprise', 'F8CA59': 'NetComm Wireless', '88B291': 'Apple, Inc.', 'C42AD0': 'Apple, Inc.', 'CCD281': 'Apple, Inc.', '200DB0': 'Shenzhen Four Seas Global Link Network Technology Co., Ltd.', 'D81EDD': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D43FCB': 'ARRIS Group, Inc.', '5C7695': 'Technicolor CH USA Inc.', 'F84D33': 'Fiberhome Telecommunication Technologies Co.,LTD', 'C08ACD': 'Guangzhou Shiyuan Electronic Technology Company Limited', 'ACF6F7': 'LG Electronics (Mobile Communications)', 'E89E0C': 'Private', '48E6C0': 'SIMCom Wireless Solutions Co.,Ltd.', '383C9C': 'Fujian Newland Payment Technology Co.,Ltd.', 'C02E25': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '107717': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', 'A86D5F': 'Raisecom Technology CO., LTD', '58ECED': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', 'A468BC': 'Private', '005079': 'Private', '100C6B': 'NETGEAR', '505FB5': 'Askey Computer Corp.', 'C4F0EC': 'Fiberhome Telecommunication Technologies Co.,LTD', 'E80FC8': 'Universal Electronics, Inc.', 'E45D37': 'Juniper Networks', '00EEAB': 'Cisco Systems, Inc', '54A703': 'TP-LINK TECHNOLOGIES CO.,LTD.', '907A58': 'Zegna-Daidong Limited', 'E009BF': 'SHENZHEN\u2002TONG BO WEI\u2002TECHNOLOGY Co.,LTD', '00131E': 'peiker acustic GmbH', '846991': 'Nokia', '001BF7': 'Lund IP Products AB', '783607': 'Cermate Technologies Inc.', 'B00073': 'Wistron Neweb Corporation', 'D88DC8': 'Atil Technology Co., LTD', 'D0ABD5': 'Intel Corporate', '88DE7C': 'Askey Computer Corp.', 'A8E2C1': 'Texas Instruments', '909A77': 'Texas Instruments', '04EE03': 'Texas Instruments', '4C2498': 'Texas Instruments', '7CD95C': 'Google, Inc.', 'C8AACC': 'Private', '002167': 'HWA JIN T&I Corp.', '000B86': 'Aruba, a Hewlett Packard Enterprise Company', 'DC31D1': 'vivo Mobile Communication Co., Ltd.', 'C8F750': 'Dell Inc.', 'D49234': 'NEC Corporation', '0007CB': 'FREEBOX SAS', '149FB6': 'GUANGDONG GENIUS TECHNOLOGY CO., LTD.', '00115A': 'Ivoclar Vivadent AG', '4CAEA3': 'Hewlett Packard Enterprise', '1C2E1B': 'Suzhou Tremenet Communication Technology Co., Ltd.', '1C24EB': 'Burlywood', '001013': 'Kontron America, Inc.', '2C2BF9': 'LG Innotek', 'D8C7C8': 'Aruba, a Hewlett Packard Enterprise Company', '703A0E': 'Aruba, a Hewlett Packard Enterprise Company', '204C03': 'Aruba, a Hewlett Packard Enterprise Company', '58C6F0': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '84A06E': 'Sagemcom Broadband SAS', 'A43EA0': 'iComm HK LIMITED', '64C2DE': 'LG Electronics (Mobile Communications)', '8C444F': 'HUMAX Co., Ltd.', '006762': 'Fiberhome Telecommunication Technologies Co.,LTD', '2CC407': 'machineQ', 'B4ED19': 'Pie Digital, Inc.', '40DF02': 'LINE BIZ Plus', 'D43B04': 'Intel Corporate', 'CCE194': 'Juniper Networks', '900218': 'BSkyB Ltd', '144E2A': 'Ciena Corporation', '84139F': 'zte corporation', 'F051EA': 'Fitbit, Inc.', '5033F0': 'YICHEN (SHENZHEN) TECHNOLOGY CO.LTD', 'FC2BB2': 'Actiontec Electronics, Inc', 'E09F2A': 'Iton Technology Corp. 
', '4CE19E': 'TECNO MOBILE LIMITED', '7495EC': 'ALPS ELECTRIC CO., LTD.', 'AC5AEE': 'China Mobile Group Device Co.,Ltd.', '70BC10': 'Microsoft Corporation', '884A18': 'Opulinks', '0006F7': 'ALPS ELECTRIC CO., LTD.', '000704': 'ALPS ELECTRIC CO., LTD.', '0006F5': 'ALPS ELECTRIC CO., LTD.', '34C731': 'ALPS ELECTRIC CO., LTD.', '9C69B4': 'IEEE Registration Authority', '500084': 'Siemens Canada', '64D4BD': 'ALPS ELECTRIC CO., LTD.', '0498F3': 'ALPS ELECTRIC CO., LTD.', '00214F': 'ALPS ELECTRIC CO., LTD.', '44B433': 'tide.co.,ltd', 'D8A6FD': 'Ghost Locomotion', 'DC21B9': 'Sentec Co.Ltd', '6CDFFB': 'IEEE Registration Authority', '247D4D': 'Texas Instruments', '8850F6': 'Shenzhen Jingxun Software Telecommunication Technology Co.,Ltd', 'E498BB': 'Phyplus Microelectronics Limited', '60A11E': 'Wuhan Maxsine Electric Co.,Ltd.', 'C45BF7': 'ants', '8CDF9D': 'NEC Corporation', '5C415A': 'Amazon.com, LLC', '705E55': 'Realme Chongqing MobileTelecommunications Corp Ltd', 'B0D568': 'Shenzhen Cultraview Digital Technology Co., Ltd', 'F00EBF': 'ZettaHash Inc.', '703509': 'Cisco Systems, Inc', '04EA56': 'Intel Corporate', 'D0C637': 'Intel Corporate', '441AFA': 'New H3C Technologies Co., Ltd', '04072E': 'VTech Electronics Ltd.', '780ED1': 'TRUMPF Werkzeugmaschinen GmbH+Co.KG', '44ECCE': 'Juniper Networks', 'F82F08': 'Molex CMS', '441C12': 'Technicolor CH USA Inc.', '4428A3': 'Jiangsu fulian Communication Technology Co., Ltd.', '10C595': 'Lenovo', '203233': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '6829DC': 'Ficosa Electronics S.L.U.', '9454DF': 'YST CORP.', '7CBC84': 'IEEE Registration Authority', 'F80DF1': 'Sontex SA', 'A49426': 'Elgama-Elektronika Ltd.', 'E4F14C': 'Private', 'A8B456': 'Cisco Systems, Inc', '2CA9F0': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '549B72': 'Ericsson AB', 'A047D7': 'Best IT World (India) Pvt Ltd', '6899CD': 'Cisco Systems, Inc', '1040F3': 'Apple, Inc.', '586B14': 'Apple, Inc.', 'BCB863': 'Apple, Inc.', '2C1CF6': 'Alien Green LLC', '6C2B59': 'Dell Inc.', '44E66E': 'Apple, Inc.', 'C0E862': 'Apple, Inc.', 'F40616': 'Apple, Inc.', '0CFE5D': 'IEEE Registration Authority', '3C8D20': 'Google, Inc.', '601D91': 'Motorola Mobility LLC, a Lenovo Company', 'D4C94B': 'Motorola Mobility LLC, a Lenovo Company', '08351B': 'Shenzhen Jialihua Electronic Technology Co., Ltd', 'AC1585': 'silergy corp', 'AC5093': 'Magna Electronics Europe GmbH & Co. OHG', '70BBE9': 'Xiaomi Communications Co Ltd', '50A0A4': 'Nokia', '00D02D': 'Resideo', '806940': 'LEXAR CO.,LIMITED', '64F81C': 'Huawei Technologies Co., Ltd.', '1098C3': 'Murata Manufacturing Co., Ltd.', '9CC8FC': 'ARRIS Group, Inc.', 'B07E11': 'Texas Instruments', '10C753': 'Qingdao Intelligent&Precise Electronics Co.,Ltd.', 'F4951B': 'Hefei Radio Communication Technology Co., Ltd ', '6C3845': 'Fiberhome Telecommunication Technologies Co.,LTD', '2C6104': 'SHENZHEN FENGLIAN TECHNOLOGY CO., LTD.', 'BC9325': 'Ningbo Joyson Preh Car Connect Co.,Ltd. 
', 'D0B60A': 'Xingluo Technology Company Limited', '049226': 'ASUSTek COMPUTER INC.', 'E8ADA6': 'Sagemcom Broadband SAS', '0C1C19': 'LONGCONN ELECTRONICS(SHENZHEN) CO.,LTD', '90E710': 'New H3C Technologies Co., Ltd', '302952': 'Hillstone Networks Inc', 'E013B5': 'vivo Mobile Communication Co., Ltd.', 'E0795E': 'Wuxi Xiaohu Technology Co.,Ltd.', '00B1E3': 'Cisco Systems, Inc', 'A41194': 'Lenovo', '00CB00': 'Private', 'DCF401': 'Dell Inc.', '0C4101': 'Ruichi Auto Technology (Guangzhou) Co., Ltd.', '00B771': 'Cisco Systems, Inc', 'E4B2FB': 'Apple, Inc.', '2CCA0C': 'WITHUS PLANET', '84326F': 'GUANGZHOU AVA ELECTRONICS TECHNOLOGY CO.,LTD ', 'C89C13': 'Inspiremobile', '8C85E6': 'Cleondris GmbH', '807D14': 'HUAWEI TECHNOLOGIES CO.,LTD', '20283E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A4FC77': 'Mega Well Limited', '4C569D': 'Apple, Inc.', '38539C': 'Apple, Inc.', '402619': 'Apple, Inc.', '6CE85C': 'Apple, Inc.', '049162': 'Microchip Technology Inc.', 'F83880': 'Apple, Inc.', '2C79D7': 'Sagemcom Broadband SAS', '00B4F5': 'DongGuan Siyoto Electronics Co., Ltd ', 'BC3F4E': 'Teleepoch Ltd', '1838AE': 'CONSPIN SOLUTION', '04CF8C': 'XIAOMI Electronics,CO.,LTD', '0C7512': 'Shenzhen Kunlun TongTai Technology Co.,Ltd.', '7483C2': 'Ubiquiti Networks Inc.', 'E063DA': 'Ubiquiti Networks Inc.', '50579C': 'Seiko Epson Corporation', '983B8F': 'Intel Corporate', '54278D': 'NXP (China) Management Ltd.', 'B0BE76': 'TP-LINK TECHNOLOGIES CO.,LTD.', '4447CC': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '4CD98F': 'Dell Inc.', 'B0AE25': 'Varikorea', '4C1B86': 'Arcadyan Corporation', 'ECC40D': 'Nintendo Co.,Ltd', '440049': 'Amazon Technologies Inc.', 'E86A64': 'LCFC(HeFei) Electronics Technology co., ltd', '10A24E': 'GOLD3LINK ELECTRONICS CO., LTD', 'CCC5E5': 'Dell Inc.', '6CC374': 'Texas Instruments', '684749': 'Texas Instruments', 'F8D9B8': 'Open Mesh, Inc.', '7C696B': 'Atmosic Technologies', '5CD20B': 'Yytek Co., Ltd.', '70037E': 'Technicolor CH USA Inc.', 'D003DF': 'Samsung Electronics Co.,Ltd', 'C423A2': 'PT. Emsonic Indonesia', 'B4CB57': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '4C1265': 'ARRIS Group, Inc.', '00500C': 'e-Tek Labs, Inc.', '485F99': 'Cloud Network Technology (Samoa) Limited', '8834FE': 'Bosch Automotive Products (Suzhou) Co. 
Ltd', '88F7BF': 'vivo Mobile Communication Co., Ltd.', 'D87D7F': 'Sagemcom Broadband SAS', '00051A': '3COM EUROPE LTD', '08004E': '3COM EUROPE LTD', '00301E': '3COM EUROPE LTD', '005004': '3COM', '000103': '3COM', '58B568': 'SECURITAS DIRECT ESPAÑA, SAU', '484AE9': 'Hewlett Packard Enterprise', '846A66': 'Sumitomo Kizai Co.,Ltd.', '18D717': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '80B624': 'IVS', 'DCF505': 'AzureWave Technology Inc.', 'CCF0FD': 'China Mobile (Hangzhou) Information Technology Co., Ltd.', '8489EC': 'IEEE Registration Authority', '88108F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4631F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A49B4F': 'HUAWEI TECHNOLOGIES CO.,LTD', '00073A': 'INVENTEL', '00266C': 'INVENTEC CORPORATION', '008CFA': 'INVENTEC CORPORATION', '5CFB7C': 'Shenzhen Jingxun Software Telecommunication Technology Co.,Ltd', 'FC039F': 'Samsung Electronics Co.,Ltd', '02C08C': '3COM', '0057C1': 'LG Electronics (Mobile Communications)', '7C240C': 'Telechips, Inc.', '00203D': 'Honeywell Environmental & Combustion Controls', '004084': 'Honeywell International HPS', '1C1BB5': 'Intel Corporate', 'A4D990': 'Samsung Electronics Co.,Ltd', '006087': 'KANSAI ELECTRIC CO., LTD.', 'DCF719': 'Cisco Systems, Inc', 'A0950C': 'China Mobile IOT Company Limited', 'D4741B': 'Beijing HuaDa ZhiBao Electronic System Co.,Ltd.', '001BC0': 'Juniper Networks', '2C15E1': 'Phicomm (Shanghai) Co., Ltd.', '30D16B': 'Liteon Technology Corporation', '98AE71': 'VVDN Technologies Pvt Ltd', 'A456CC': 'Technicolor CH USA Inc.', 'AC6E1A': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '0080EB': 'COMPCONTROL B.V.', '0002EB': 'Pico Communications', '342EB6': 'HUAWEI TECHNOLOGIES CO.,LTD', 'AC9232': 'HUAWEI TECHNOLOGIES CO.,LTD', '000FB0': 'Compal Electronics INC.', '00023F': 'Compal Electronics INC.', '68A8E1': 'Wacom Co.,Ltd.', '30D32D': 'devolo AG', 'BCF2AF': 'devolo AG', 'E0AF4F': 'Deutsche Telekom AG', 'DC8B28': 'Intel Corporate', 'B869F4': 'Routerboard.com', '283A4D': 'Cloud Network Technology (Samoa) Limited', 'B87C6F': 'NXP (China) Management Ltd.', '305DA6': 'ADVALY SYSTEM Inc.', 'BC30D9': 'Arcadyan Corporation', '0479B7': 'Texas Instruments', 'C0D0FF': 'China Mobile IOT Company Limited', '88DF9E': 'New H3C Technologies Co., Ltd', '2C7CE4': 'Wuhan Tianyu Information Industry Co., Ltd.', '5803FB': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '144802': 'THE YEOLRIM Co.,Ltd.', 'FC4AE9': 'Castlenet Technology Inc.', '40313C': 'XIAOMI Electronics,CO.,LTD', 'FC4596': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'A0E534': 'Stratec Biomedical AG', '444B5D': 'GE Healthcare', '001555': 'DFM GmbH', '1C7508': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '001B38': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '00235A': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. 
', '24D76B': 'Syntronic AB', 'C4FEE2': 'AMICCOM Electronics Corporation', '780CF0': 'Cisco Systems, Inc', '0C8C24': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '8C6D77': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E8C57A': 'Ufispace Co., LTD.', 'E01283': ' Shenzhen Fanzhuo Communication Technology Co., Lt', 'A0CF5B': 'Cisco Systems, Inc', '002421': "MICRO-STAR INT'L CO., LTD.", '0060D1': 'CASCADE COMMUNICATIONS', '84C9C6': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '88B66B': 'easynetworks', '24F57E': 'HWH CO., LTD.', '8CA048': 'Beijing NeTopChip Technology Co.,LTD', '24D3F2': 'zte corporation', 'D469A5': 'Miura Systems Ltd.', '8C8126': 'ARCOM', 'D47C44': 'IEEE Registration Authority', '805E4F': 'FN-LINK TECHNOLOGY LIMITED', '8417EF': 'Technicolor CH USA Inc.', '3856B5': 'Peerbridge Health Inc', '7C96D2': 'Fihonest communication co.,Ltd', '302432': 'Intel Corporate', 'C042D0': 'Juniper Networks', 'D0C5D8': 'LATECOERE', '7054B4': 'Vestel Elektronik San ve Tic. A.Ş.', '20A60C': 'Xiaomi Communications Co Ltd', '488AD2': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', 'DCE838': 'CK Telecom (Shenzhen) Limited', 'A8D498': 'Avira Operations GmbH & Co. KG', '505967': 'Intent Solutions Inc', '000680': 'Card Access, Inc.', '3C576C': 'Samsung Electronics Co.,Ltd', '841766': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '2C4D79': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '000C42': 'Routerboard.com', '0026BD': 'JTEC Card &amp; Communication Co., Ltd', '60D02C': 'Ruckus Wireless', 'D058FC': 'BSkyB Ltd', '14579F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B44326': 'HUAWEI TECHNOLOGIES CO.,LTD', '04AB18': 'ELECOM CO.,LTD.', '78D294': 'NETGEAR', '709FA9': 'TECNO MOBILE LIMITED', '0C01DB': 'Infinix mobility limited', '08C5E1': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '1866C7': 'Shenzhen Libre Technology Co., Ltd', '5CB3F6': 'Human, Incorporated', '2C4835': 'IEEE Registration Authority', '482AE3': 'Wistron InfoComm(Kunshan)Co.,Ltd.', 'FCA6CD': 'Fiberhome Telecommunication Technologies Co.,LTD', '44C874': 'China Mobile Group Device Co.,Ltd.', '74C14F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B0EB57': 'HUAWEI TECHNOLOGIES CO.,LTD', '1869DA': 'China Mobile Group Device Co.,Ltd.', 'F85C4D': 'Nokia', '2C584F': 'ARRIS Group, Inc.', 'F04B3A': 'Juniper Networks', '54BF64': 'Dell Inc.', 'A85B6C': 'Robert Bosch Gmbh, CM-CI2', 'C8B1EE': 'Qorvo', '00FCBA': 'Cisco Systems, Inc', '00CBB4': 'SHENZHEN ATEKO PHOTOELECTRICITY CO.,LTD', '4CC00A': 'vivo Mobile Communication Co., Ltd.', '9CE82B': 'vivo Mobile Communication Co., Ltd.', '7079B3': 'Cisco Systems, Inc', 'D818D3': 'Juniper Networks', '149B2F': 'JiangSu ZhongXie Intelligent Technology co., LTD', '3835FB': 'Sagemcom Broadband SAS', '48DD9D': 'ITEL MOBILE LIMITED', 'A075EA': 'BoxLock, Inc.', 'F04CD5': 'Maxlinear, Inc', '0001AE': 'Trex Enterprises', '00E009': 'Stratus Technologies', 'E4EA83': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '74EC42': 'Fiberhome Telecommunication Technologies Co.,LTD', 'D4FC13': 'Fiberhome Telecommunication Technologies Co.,LTD', '807D3A': 'Espressif Inc.', 'ECAF97': 'GIT', 'A0B045': 'Halong Mining', 'E0BAB4': 'Arrcus, Inc', '589B0B': 'Shineway Technologies, Inc.', 'D8160A': 'Nippon Electro-Sensory Devices', '10C07C': 'Blu-ray Disc Association', '40A677': 'Juniper Networks', 'E4B021': 'Samsung Electronics Co.,Ltd', '9C7F57': 'UNIC Memory Technology Co Ltd', 'B4E01D': 'CONCEPTION ELECTRONIQUE', '1C0042': 'NARI Technology Co., Ltd.', '4434A7': 'ARRIS Group, Inc.', '3CE1A1': 'Universal Global Scientific Industrial Co., Ltd.', 'F898EF': 'HUAWEI TECHNOLOGIES CO.,LTD', '58F987': 'HUAWEI TECHNOLOGIES 
CO.,LTD', 'A8F5AC': 'HUAWEI TECHNOLOGIES CO.,LTD', '58BAD4': 'HUAWEI TECHNOLOGIES CO.,LTD', '701D08': '99IOT Shenzhen co.,ltd', '7412BB': 'Fiberhome Telecommunication Technologies Co.,LTD', '001027': 'L-3 COMMUNICATIONS EAST', 'BC2643': 'Elprotronic Inc.', '04E229': 'Qingdao Haier Technology Co.,Ltd', '348B75': 'LAVA INTERNATIONAL(H.K) LIMITED', '9CE895': 'New H3C Technologies Co., Ltd', '00583F': 'PC Aquarius', '903D68': 'G-Printec, Inc.', '1094BB': 'Apple, Inc.', '781D4A': 'zte corporation', '94E1AC': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '34E894': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F86FC1': 'Apple, Inc.', '28FF3C': 'Apple, Inc.', 'F099B6': 'Apple, Inc.', '88E9FE': 'Apple, Inc.', '38892C': 'Apple, Inc.', '749EAF': 'Apple, Inc.', '94BF2D': 'Apple, Inc.', '68CAE4': 'Cisco Systems, Inc', '00BE3B': 'HUAWEI TECHNOLOGIES CO.,LTD', '7CA177': 'HUAWEI TECHNOLOGIES CO.,LTD', '242E02': 'HUAWEI TECHNOLOGIES CO.,LTD', '78B6EC': 'Scuf Gaming International LLC', '8035C1': 'Xiaomi Communications Co Ltd', '58B3FC': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', '2047DA': 'Xiaomi Communications Co Ltd', '2429FE': 'KYOCERA Corporation ', '7C49EB': 'XIAOMI Electronics,CO.,LTD', 'C43306': 'China Mobile Group Device Co.,Ltd.', '08DFCB': 'Systrome Networks', 'A4933F': 'HUAWEI TECHNOLOGIES CO.,LTD', '28AC9E': 'Cisco Systems, Inc', '2CB8ED': 'SonicWall', '68D482': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '984562': 'Shanghai Baud Data Communication Co.,Ltd.', 'E4C483': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '001FBA': 'Boyoung Tech', 'A433D7': 'MitraStar Technology Corp.', 'B0ACD2': 'zte corporation', '200F70': 'FOXTECH', '202D23': 'Collinear Networks Inc.', '90834B': 'BEIJING YUNYI TIMES TECHNOLOGY CO,.LTD', '18502A': 'SOARNEX', 'A8367A': 'frogblue TECHNOLOGY GmbH', '6CE4DA': 'NEC Platforms, Ltd.', '10E7C6': 'Hewlett Packard', '1831BF': 'ASUSTek COMPUTER INC.', 'C8FAE1': 'ARQ Digital LLC', 'DCA333': 'Shenzhen YOUHUA Technology Co., Ltd', '788C54': 'Ping Communication', 'B8AF67': 'Hewlett Packard', 'C098DA': 'China Mobile IOT Company Limited', 'F00FEC': 'HUAWEI TECHNOLOGIES CO.,LTD', 'AC075F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C048E6': 'Samsung Electronics Co.,Ltd', '882E5A': 'storONE', '007147': 'Amazon Technologies Inc.', '00BE75': 'Cisco Systems, Inc', '64DB8B': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '78257A': 'LEO Innovation Lab', '10A4B9': 'Baidu Online Network Technology (Beijing) Co., Ltd', '501CB0': 'Cisco Systems, Inc', '04AC44': 'Holtek Semiconductor Inc.', 'F4DCA5': 'DAWON DNS', '0C5415': 'Intel Corporate', '80CE62': 'Hewlett Packard', '801F12': 'Microchip Technology Inc.', '506CBE': 'InnosiliconTechnology Ltd', '247E12': 'Cisco Systems, Inc', '04C241': 'Nokia', '3C479B': 'Theissen Training Systems, Inc.', '606BFF': 'Nintendo Co.,Ltd', '8CF710': 'AMPAK Technology, Inc.', '307BAC': 'New H3C Technologies Co., Ltd', '785DC8': 'LG Electronics', '3C0461': 'ARRIS Group, Inc.', '883D24': 'Google, Inc.', 'E8C1B8': ' Nanjing Bangzhong Electronic Commerce Limited', 'D8D775': 'Sagemcom Broadband SAS', 'E8330D': 'Xaptec GmbH', 'D460E3': 'Sercomm Corporation.', 'B4A8B9': 'Cisco Systems, Inc', '50DCE7': 'Amazon Technologies Inc.', '649829': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '081DC4': 'Thermo Fisher Scientific Messtechnik GmbH', '785364': 'SHIFT GmbH', '38E60A': 'Xiaomi Communications Co Ltd', '40CBC0': 'Apple, Inc.', 'C4618B': 'Apple, Inc.', 'CC6EA4': 'Samsung Electronics Co.,Ltd', '5C5F67': 'Intel Corporate', '803A59': 'AT&T', '588D64': "Xi'an Clevbee Technology Co.,Ltd", '005D73': 'Cisco Systems, Inc', '606D3C': 'Luxshare Precision Industry Company Limited', '44F034': 'Kaonmedia CO., LTD.', '002790': 'Cisco Systems, Inc', '34BA38': 'PAL MOHAN ELECTRONICS PVT LTD', '9829A6': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'CC4D38': 'Carnegie Technologies', '08E689': 'Apple, Inc.', 'DC56E7': 'Apple, Inc.', 'A816D0': 'Samsung Electronics Co.,Ltd', 'A46CF1': 'Samsung Electronics Co.,Ltd', '08AED6': 'Samsung Electronics Co.,Ltd', 'DCBFE9': 'Motorola Mobility LLC, a Lenovo Company', 'B42D56': 'Extreme Networks, Inc.', '4064A4': 'THE FURUKAWA ELECTRIC CO., LTD', '6CB2AE': 'Cisco Systems, Inc', 'B0982B': 'Sagemcom Broadband SAS', '34FA9F': 'Ruckus Wireless', 'F065C2': 'Yanfeng Visteon Electronics Technology (Shanghai) Co.,Ltd.', '70B7E2': 'Jiangsu Miter Technology Co.,Ltd.', '808DB7': 'Hewlett Packard Enterprise', 'A09D86': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '00C0FF': 'Seagate Cloud Systems Inc', 'D45DDF': 'PEGATRON CORPORATION', 'F8B568': 'IEEE Registration Authority', '2C6B7D': 'Texas Instruments', '88D171': 'BEGHELLI S.P.A', 'A09DC1': 'China Dragon Technology Limited', '2C4205': 'Lytx', 'A825EB': 'Cambridge Industries(Group) Co.,Ltd.', '34E380': 'Genexis B.V.', '5C5819': 'Jingsheng Technology Co., Ltd.', 'B8CA04': 'Holtek Semiconductor Inc.', 'C4C563': 'TECNO MOBILE LIMITED', '80B708': 'Blue Danube Systems, Inc', '08BC20': 'Hangzhou Royal Cloud Technology Co., Ltd', '942A3F': 'Diversey Inc', '2031EB': 'HDSN', 'F8C96C': 'Fiberhome Telecommunication Technologies Co.,LTD', '844823': 'WOXTER TECHNOLOGY Co. Ltd', 'F41E5E': 'RtBrick Inc.', '6C7660': 'KYOCERA CORPORATION ', '002102': 'UpdateLogic Inc.', '505800': 'WyTec International, Inc.', 'C8D12A': 'Comtrend Corporation', '0CEAC9': 'ARRIS Group, Inc.', 'E82A44': 'Liteon Technology Corporation', '10A4BE': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '947BBE': 'Ubicquia', 'ECC06A': 'PowerChord Group Limited', '944996': 'WiSilica Inc', 'F81D0F': 'Hitron Technologies. 
Inc', '58C935': 'Chiun Mai Communication Systems, Inc', '0094A1': 'F5 Networks, Inc.', 'BCF292': 'PLANTRONICS, INC.', '0450DA': 'Qiku Internet Network Scientific (Shenzhen) Co., Ltd', 'E820E2': 'HUMAX Co., Ltd.', '0026A8': 'DAEHAP HYPER-TECH', '785C28': 'Prime Motion Inc.', 'F4EAB5': 'Aerohive Networks Inc.', '8C5BF0': 'ARRIS Group, Inc.', '1890D8': 'Sagemcom Broadband SAS', '88835D': 'FN-LINK TECHNOLOGY LIMITED', 'EC9F0D': 'IEEE Registration Authority', 'E078A3': 'Shanghai Winner Information Technology Co.,Inc', '0005A7': 'HYPERCHIP Inc.', '088466': 'Novartis Pharma AG', '309FFB': 'Ardomus Networks Corporation', '282373': 'Digita', '649A08': 'Shenzhen SuperElectron Technology Co.,LTD', '68A682': 'Shenzhen YOUHUA Technology Co., Ltd', '587A62': 'Texas Instruments', '547A52': 'CTE International srl', 'F06E0B': 'Microsoft Corporation', '346FED': 'Enovation Controls', '5433CB': 'Apple, Inc.', '3408BC': 'Apple, Inc.', '1C36BB': 'Apple, Inc.', '3C2EFF': 'Apple, Inc.', '00E025': 'dit Co., Ltd.', 'AC84C6': 'TP-LINK TECHNOLOGIES CO.,LTD.', '001530': 'Dell EMC', '0CB2B7': 'Texas Instruments', '24F677': 'Apple, Inc.', 'B0CA68': 'Apple, Inc.', 'C83C85': 'Apple, Inc.', '78870D': 'Unifiedgateways India Private Limited', 'A88200': 'Hisense Electric Co.,Ltd', '3820A8': 'ColorTokens, Inc.', '705896': 'InShow Technology', '000589': 'National Datacomputer', '3CA616': 'vivo Mobile Communication Co., Ltd.', '9CE063': 'Samsung Electronics Co.,Ltd', 'D03169': 'Samsung Electronics Co.,Ltd', '24F27F': 'Hewlett Packard Enterprise', 'BC0543': 'AVM GmbH', '00B69F': 'Latch', '842C80': 'Sichuan Changhong Electric Ltd.', '3CC079': 'Shenzhen One-Nine Intelligent Electronic Science and Technology Co., Ltd', '98C5DB': 'Ericsson AB', '149F3C': 'Samsung Electronics Co.,Ltd', 'FCEEE6': 'FORMIKE ELECTRONIC CO., LTD', '84E327': 'TAILYN TECHNOLOGIES INC', '0021B8': 'Inphi Corporation', '0C9160': 'Hui Zhou Gaoshengda Technology Co.,LTD', 'D8ED1C': 'Magna Technology SL', 'D83134': 'Roku, Inc', '408BF6': 'Shenzhen TCL New Technology Co., Ltd', '00006B': 'Silicon Graphics', '74373B': 'UNINET Co.,Ltd.', '7C6456': 'Samsung Electronics Co.,Ltd', 'F46E24': 'NEC Personal Computers, Ltd.', '888279': 'Shenzhen RB-LINK Intelligent Technology Co.Ltd', '68C63A': 'Espressif Inc.', 'A0648F': 'ASKEY COMPUTER CORP', 'C850E9': 'Raisecom Technology CO., LTD', '10F163': 'TNK CO.,LTD', '88DA1A': 'Redpine Signals, Inc.', '98EF9B': 'OHSUNG', '14CF8D': 'OHSUNG', '104400': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B0E17E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E4A7C5': 'HUAWEI TECHNOLOGIES CO.,LTD', '5C8D2D': 'Shanghai Wellpay Information Technology Co., Ltd', '90FD9F': 'Silicon Laboratories', 'B430C0': 'York Instruments Ltd', 'E81DA8': 'Ruckus Wireless', 'F03D03': 'TECNO MOBILE LIMITED', 'DCF090': 'Nubia Technology Co.,Ltd.', 'A0FE61': 'Vivint Wireless Inc. ', '5C2BF5': 'Vivint Wireless Inc. 
', 'CC5A53': 'Cisco Systems, Inc', '006088': 'Analog Devices, Inc.', '084ACF': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '2C279E': 'IEEE Registration Authority', '8C5F48': 'Continental Intelligent Transportation Systems LLC', '947EB9': 'National Narrowband Network Communications Pty Ltd', '646E69': 'Liteon Technology Corporation', '706BB9': 'Cisco Systems, Inc', 'D4389C': 'Sony Mobile Communications Inc', '00C0EE': 'KYOCERA Display Corporation', '245FDF': 'KYOCERA CORPORATION ', '9C63ED': 'zte corporation', '74F661': 'Schneider Electric Fire & Security Oy', 'B8634D': 'Apple, Inc.', '24C42F': 'Philips Lifeline', '2CB21A': 'Phicomm (Shanghai) Co., Ltd.', 'A4E975': 'Apple, Inc.', '3035AD': 'Apple, Inc.', '844167': 'Apple, Inc.', '9800C6': 'Apple, Inc.', 'AC1F74': 'Apple, Inc.', 'A85C2C': 'Apple, Inc.', '00DB70': 'Apple, Inc.', 'FCE557': 'Nokia Corporation', '48C58D': 'Lear Corporation GmbH', '00289F': 'Semptian Co., Ltd.', '9C305B': 'Hon Hai Precision Ind. Co.,Ltd.', '104E89': 'Garmin International', 'D8C497': 'Quanta Computer Inc.', 'BCC342': 'Panasonic Communications Co., Ltd.', '001BD3': 'Panasonic Corporation AVC Networks Company', 'CC7EE7': 'Panasonic Corporation AVC Networks Company', '20C6EB': 'Panasonic Corporation AVC Networks Company', '64B5C6': 'Nintendo Co.,Ltd', '2830AC': 'Frontiir Co. Ltd.', 'D4D2E5': 'BKAV Corporation', '0050B5': 'FICHET SECURITE ELECTRONIQUE', '001439': 'Blonder Tongue Laboratories, Inc', '20A6CD': 'Hewlett Packard Enterprise', '84802D': 'Cisco Systems, Inc', '001987': 'Panasonic Mobile Communications Co.,Ltd.', 'E470B8': 'Intel Corporate', '741C27': 'ITEL MOBILE LIMITED', '00A0AC': 'GILAT SATELLITE NETWORKS, LTD.', '002609': 'Phyllis Co., Ltd.', '28F537': 'IEEE Registration Authority', '00869C': 'Palo Alto Networks', '94D9B3': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'C84029': 'Fiberhome Telecommunication Technologies Co.,LTD', 'F86EEE': 'HUAWEI TECHNOLOGIES CO.,LTD', '7802B1': 'Cisco Systems, Inc', 'B40F3B': 'Tenda Technology Co.,Ltd.Dongguan branch', '00188D': 'Nokia Danmark A/S', '0015AB': 'PRO CO SOUND INC', '5876C5': "DIGI I'S LTD", 'A8B2DA': 'FUJITSU LIMITED', '001354': 'Zcomax Technologies, Inc.', '78D800': 'IEEE Registration Authority', '0835B2': 'CoreEdge Networks Co., Ltd', '4C49E3': 'Xiaomi Communications Co Ltd', '245880': 'VIZEO', '54666C': 'Shenzhen YOUHUA Technology Co., Ltd', 'A89675': 'Motorola Mobility LLC, a Lenovo Company', '389AF6': 'Samsung Electronics Co.,Ltd', 'E0AA96': 'Samsung Electronics Co.,Ltd', '507705': 'Samsung Electronics Co.,Ltd', 'F83441': 'Intel Corporate', '28D436': 'Jiangsu dewosi electric co., LTD', 'D4B27A': 'ARRIS Group, Inc.', '44EA4B': 'Actlas Inc.', 'C4CB6B': 'Airista Flow, Inc.', '188090': 'Cisco Systems, Inc', '786256': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B05508': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B875C0': 'PayPal, Inc.', '001C71': 'Emergent Electronics', '001A93': 'ERCO Leuchten GmbH', '94F665': 'Ruckus Wireless', 'B090D4': 'Shenzhen Hoin Internet Technology Co., Ltd', 'C0BAE6': 'Application Solutions (Electronics and Vision) Ltd', '8C9F3B': 'Qingdao Hisense Communications Co.,Ltd.', '0014B8': 'Hill-Rom', 'ACED5C': 'Intel Corporate', '842096': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', 'B4C170': 'Yi chip Microelectronics (Hangzhou) Co., Ltd', 'AC6706': 'Ruckus Wireless', '044FAA': 'Ruckus Wireless', '589396': 'Ruckus Wireless', '001F41': 'Ruckus Wireless', 'C4108A': 'Ruckus Wireless', 'F0B052': 'Ruckus Wireless', '70DF2F': 'Cisco Systems, Inc', '3894E0': 'Syrotech Networks. 
Ltd.', '34F64B': 'Intel Corporate', 'F46BEF': 'Sagemcom Broadband SAS', '08306B': 'Palo Alto Networks', '10CDB6': 'Essential Products, Inc.', 'A4F3E7': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'E43A6E': 'Shenzhen Zeroone Technology CO.,LTD', '7CE2CA': 'Juniper Networks', '9061AE': 'Intel Corporate', '6CF9D2': 'CHENGDU POVODO ELECTRONIC TECHNOLOGY CO., LTD', '30B62D': ' Mojo Networks, Inc.', '50184C': 'Platina Systems Inc.', 'F4B7B3': 'vivo Mobile Communication Co., Ltd.', 'CC03D9': 'Cisco Meraki', 'FCA667': 'Amazon Technologies Inc.', 'C81FEA': 'Avaya Inc', '0027E3': 'Cisco Systems, Inc', '0018AE': 'TVT CO.,LTD', '8891DD': 'Racktivity', '1C4593': 'Texas Instruments', '90EC50': 'C.O.B.O. SPA', 'E0D848': 'Dell Inc.', '60271C': 'VIDEOR E. Hartig GmbH', '00EC0A': 'Xiaomi Communications Co Ltd', 'C8D7B0': 'Samsung Electronics Co.,Ltd', '6C60EB': 'ZHI YUAN ELECTRONICS CO., LIMITED', '74DADA': 'D-Link International', 'D8F1F0': 'Pepxim International Limited', 'DCC8F5': 'Shanghai UMEinfo CO.,LTD.', '88D7F6': 'ASUSTek COMPUTER INC.', '9097F3': 'Samsung Electronics Co.,Ltd', '7C1C68': 'Samsung Electronics Co.,Ltd', 'C087EB': 'Samsung Electronics Co.,Ltd', '04714B': 'IEEE Registration Authority', '2C41A1': 'Bose Corporation', '4C38D8': 'ARRIS Group, Inc.', '447BBB': 'Shenzhen YOUHUA Technology Co., Ltd', '9C7BD2': 'NEOLAB Convergence', 'D0F88C': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '2CB115': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '34873D': 'Quectel Wireless Solution Co.,Ltd.', '10D07A': 'AMPAK Technology, Inc.', 'D4C1C8': 'zte corporation', '88D274': 'zte corporation', '002449': 'Shen Zhen Lite Star Electronics Technology Co., Ltd', '00E18C': 'Intel Corporate', '847933': 'profichip GmbH', '881544': 'Cisco Meraki', 'C4ABB2': 'vivo Mobile Communication Co., Ltd.', '80B234': 'Technicolor CH USA Inc.', '44B412': 'SIUS AG', '0CB912': 'JM-DATA GmbH', '3CA308': 'Texas Instruments', 'F43E61': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'B4417A': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '185207': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '000062': 'BULL HN INFORMATION SYSTEMS', 'E8C1D7': 'Philips', '4C8120': 'Taicang T&W Electronics', '28A6DB': 'HUAWEI TECHNOLOGIES CO.,LTD', '14A0F8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C8F86D': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '6045CB': 'ASUSTek COMPUTER INC.', '00118B': 'Alcatel-Lucent Enterprise', '00E0B1': 'Alcatel-Lucent Enterprise', '00E0DA': 'Alcatel-Lucent Enterprise', 'F8BE0D': 'A2UICT Co.,Ltd.', 'E442A6': 'Intel Corporate', '3C678C': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D4503F': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '388C50': 'LG Electronics', 'DC0856': 'Alcatel-Lucent Enterprise', '1CDA27': 'vivo Mobile Communication Co., Ltd.', '90F305': 'HUMAX Co., Ltd.', '4095BD': 'NTmore.Co.,Ltd', '98AAFC': 'IEEE Registration Authority', '00143F': 'Hotway Technology Corporation', 'D055B2': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '144FD7': 'IEEE Registration Authority', 'B85510': 'Zioncom Electronics (Shenzhen) Ltd.', '049573': 'zte corporation', 'F0D7AA': 'Motorola Mobility LLC, a Lenovo Company', 'F8FF0B': 'Electronic Technology Inc.', '7C6BF7': 'NTI co., ltd.', '00D318': 'SPG Controls', '3096FB': 'Samsung Electronics Co.,Ltd', '4827EA': 'Samsung Electronics Co.,Ltd', '7C787E': 'Samsung Electronics Co.,Ltd', '245BA7': 'Apple, Inc.', '70F087': 'Apple, Inc.', 'E0C63C': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '000C46': 'Allied Telesyn Inc.', '001F72': 'QingDao Hiphone Technology Co,.Ltd', '002365': 'Insta Elektro GmbH', '6091F3': 'vivo Mobile Communication Co., Ltd.', '28395E': 'Samsung Electronics Co.,Ltd', '38295A': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '88E628': 'Shenzhen Kezhonglong Optoelectronic Technology Co.,Ltd', '58238C': 'Technicolor CH USA Inc.', 'CC82EB': 'KYOCERA CORPORATION ', '14987D': 'Technicolor CH USA Inc.', 'D4CF37': 'Symbolic IO', 'D47DFC': 'TECNO MOBILE LIMITED', '409F38': 'AzureWave Technology Inc.', '000631': 'Calix Inc.', 'BC2F3D': 'vivo Mobile Communication Co., Ltd.', '40FA7F': 'Preh Car Connect GmbH', '000DBB': 'Nippon Dentsu Co.,Ltd.', '2C7E81': 'ARRIS Group, Inc.', '407D0F': 'HUAWEI TECHNOLOGIES CO.,LTD', '68CC6E': 'HUAWEI TECHNOLOGIES CO.,LTD', '3034D2': 'Availink, Inc.', '504061': 'Nokia', '00108E': 'HUGH SYMONS CONCEPT Technologies Ltd.', 'E05163': 'Arcadyan Corporation', '54E3F6': 'Alcatel-Lucent', '40B034': 'Hewlett Packard', 'B816DB': 'CHANT SINCERE CO.,LTD', '40B4CD': 'Amazon Technologies Inc.', 'D481D7': 'Dell Inc.', 'F42B48': 'Ubiqam', '50F14A': 'Texas Instruments', '04DEF2': 'Shenzhen ECOM Technology Co. Ltd', '540384': 'Hangkong Nano IC Technologies Co., Ltd', '78C1A7': 'zte corporation', '4C7872': 'Cav. Uff. Giacomo Cimberio S.p.A. ', '8CF5A3': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '8CC8F4': 'IEEE Registration Authority', 'F483E1': 'Shanghai Clouder Semiconductor Co.,Ltd', '083E5D': 'Sagemcom Broadband SAS', '3CBD3E': 'Beijing Xiaomi Electronics Co., Ltd.', '641A22': 'Heliospectra AB', 'A084CB': 'SonicSensory,Inc.', 'D47AE2': 'Samsung Electronics Co.,Ltd', '6854FD': 'Amazon Technologies Inc.', '0003BC': 'COT GmbH', 'D4B169': 'Le Shi Zhi Xin Electronic Technology (Tianjin) Limited', 'E44790': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '38454C': 'Light Labs, Inc.', '000A49': 'F5 Networks, Inc.', '00A0C8': 'Adtran Inc', 'F49651': 'NAKAYO Inc', '446246': 'Comat AG', '34FCB9': 'Hewlett Packard Enterprise', '70918F': 'Weber-Stephen Products LLC', 'D8E0E1': 'Samsung Electronics Co.,Ltd', '7C1015': 'Brilliant Home Technology, Inc.', 'D8C771': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E02A82': 'Universal Global Scientific Industrial Co., Ltd.', 'B0F963': 'Hangzhou H3C Technologies Co., Limited', 'D490E0': 'Topcon Electronics GmbH & Co. 
KG', 'A84041': 'Dragino Technology Co., Limited', 'E02202': 'ARRIS Group, Inc.', 'D825B0': 'Rockeetech Systems Co.,Ltd.', '74614B': 'Chongqing Huijiatong Information Technology Co., Ltd.', '98D293': 'Google, Inc.', 'CCB8A8': 'AMPAK Technology, Inc.', '1077B0': 'Fiberhome Telecommunication Technologies Co.,LTD', 'F01DBC': 'Microsoft Corporation', '34049E': 'IEEE Registration Authority', '94FB29': 'Zebra Technologies Inc.', 'B0702D': 'Apple, Inc.', '6C19C0': 'Apple, Inc.', '00204F': 'DEUTSCHE AEROSPACE AG', '00DBDF': 'Intel Corporate', '94A04E': 'Bostex Technology Co., LTD', '8CE117': 'zte corporation', '688AF0': 'zte corporation', 'C0210D': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', '4CE2F1': 'sclak srl', '504B5B': 'CONTROLtronic GmbH', 'B47447': 'CoreOS', '80D4A5': 'HUAWEI TECHNOLOGIES CO.,LTD', '04B0E7': 'HUAWEI TECHNOLOGIES CO.,LTD', '446A2E': 'HUAWEI TECHNOLOGIES CO.,LTD', '0C8DDB': 'Cisco Meraki', 'B0EE7B': 'Roku, Inc', 'AC587B': 'JCT Healthcare', '1062EB': 'D-Link International', '000894': 'InnoVISION Multimedia Ltd.', '480033': 'Technicolor CH USA Inc.', 'A06FAA': 'LG Innotek', '0026AB': 'Seiko Epson Corporation', 'FC10C6': 'Taicang T&W Electronics', '2C6FC9': 'Hon Hai Precision Ind. Co.,Ltd.', '58EF68': 'Belkin International Inc.', '000B4F': 'Verifone', '60C798': 'Verifone', 'C8662C': 'Beijing Haitai Fangyuan High Technology Co,.Ltd.', '8096CA': 'Hon Hai Precision Ind. Co.,Ltd.', '186571': 'Top Victory Electronics (Taiwan) Co., Ltd.', 'F83F51': 'Samsung Electronics Co.,Ltd', '50D753': 'CONELCOM GmbH', '0CC47A': 'Super Micro Computer, Inc.', '34D270': 'Amazon Technologies Inc.', '50795B': 'Interexport Telecomunicaciones S.A.', '0016D9': 'NINGBO BIRD CO.,LTD.', '6CA7FA': 'YOUNGBO ENGINEERING INC.', '8C7EB3': 'Lytro, Inc.', 'B4B384': 'ShenZhen Figigantic Electronic Co.,Ltd', '7828CA': 'Sonos, Inc.', '00C003': 'GLOBALNET COMMUNICATIONS', '00234A': 'Private', '2C402B': 'Smart iBlue Technology Limited', '5C6B4F': 'Hello Inc.', '2C9924': 'ARRIS Group, Inc.', 'D058A8': 'zte corporation', 'D071C4': 'zte corporation', 'A0CC2B': 'Murata Manufacturing Co., Ltd.', '5001D9': 'HUAWEI TECHNOLOGIES CO.,LTD', '00271C': 'MERCURY CORPORATION', 'E0D9E3': 'Eltex Enterprise Ltd.', '805EC0': 'YEALINK(XIAMEN) NETWORK TECHNOLOGY CO.,LTD.', '007B18': 'SENTRY Co., LTD.', '144D67': 'Zioncom Electronics (Shenzhen) Ltd.', '4CE173': 'IEEE Registration Authority', '0CD86C': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '049790': 'Lartech telecom LLC', '28EED3': 'Shenzhen Super D Technology Co., Ltd', '24C44A': 'zte corporation', '98541B': 'Intel Corporate', '1C40E8': 'SHENZHEN PROGRESS&WIN TECHNOLOGY CO.,LTD', '0023D2': 'Inhand Electronics, Inc.', 'DC0B34': 'LG Electronics (Mobile Communications)', '404E36': 'HTC Corporation', 'A8E705': 'Fiberhome Telecommunication Technologies Co.,LTD', '9840BB': 'Dell Inc.', '0060D6': 'NovAtel Inc.', '503AA0': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', 'B0958E': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'C025E9': 'TP-LINK TECHNOLOGIES CO.,LTD.', '802AA8': 'Ubiquiti Networks Inc.', 'F09FC2': 'Ubiquiti Networks Inc.', '188B15': 'ShenZhen ZhongRuiJing Technology co.,LTD', '788A20': 'Ubiquiti Networks Inc.', '886B0F': 'Bluegiga Technologies OY', 'AC84C9': 'Sagemcom Broadband SAS', '245CBF': 'NCSE', '2C3361': 'Apple, Inc.', '60A4D0': 'Samsung Electronics Co.,Ltd', '008701': 'Samsung Electronics Co.,Ltd', '5C9960': 'Samsung Electronics Co.,Ltd', '9C62AB': 'Sumavision Technologies Co.,Ltd', 'C8F946': 'LOCOSYS Technology Inc.', '487B6B': 'HUAWEI TECHNOLOGIES CO.,LTD', '883FD3': 'HUAWEI 
TECHNOLOGIES CO.,LTD', '240D65': 'Shenzhen Vsun Communication Technology Co., Ltd.', '000B14': 'ViewSonic Corporation', 'C8028F': 'Nova Electronics (Shanghai) Co., Ltd.', 'A46011': 'Verifone', '5CA933': 'Luma Home', '00137E': 'CorEdge Networks, Inc.', 'D814D6': 'SURE SYSTEM Co Ltd', '6CEFC6': 'SHENZHEN TWOWING TECHNOLOGIES CO.,LTD.', '101DC0': 'Samsung Electronics Co.,Ltd', '002341': 'Vanderbilt International (SWE) AB ', '407C7D': 'Nokia', '24590B': 'White Sky Inc. Limited', '68EBAE': 'Samsung Electronics Co.,Ltd', '444E1A': 'Samsung Electronics Co.,Ltd', '143365': 'TEM Mobile Limited', '801844': 'Dell Inc.', '78471D': 'Samsung Electronics Co.,Ltd', 'A07591': 'Samsung Electronics Co.,Ltd', '0CDFA4': 'Samsung Electronics Co.,Ltd', 'B072BF': 'Murata Manufacturing Co., Ltd.', '701DC4': 'NorthStar Battery Company, LLC', '64DAA0': 'Robert Bosch Smart Home GmbH', '14B837': 'Shenzhen YOUHUA Technology Co., Ltd', '5C8613': 'Beijing Zhoenet Technology Co., Ltd', 'CC7314': 'HONG KONG WHEATEK TECHNOLOGY LIMITED', 'B8EE65': 'Liteon Technology Corporation', '985BB0': 'KMDATA INC.', 'E006E6': 'Hon Hai Precision Ind. Co.,Ltd.', 'BC8556': 'Hon Hai Precision Ind. Co.,Ltd.', '342387': 'Hon Hai Precision Ind. Co.,Ltd.', '002637': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '002119': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', 'F4D9FB': 'Samsung Electronics Co.,Ltd', '3C6200': 'Samsung Electronics Co.,Ltd', 'C417FE': 'Hon Hai Precision Ind. Co.,Ltd.', '9439E5': 'Hon Hai Precision Ind. Co.,Ltd.', '642737': 'Hon Hai Precision Ind. Co.,Ltd.', 'A41731': 'Hon Hai Precision Ind. Co.,Ltd.', '5CA39D': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '90187C': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '50CCF8': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '00223B': 'Communication Networks, LLC', 'C0F8DA': 'Hon Hai Precision Ind. Co.,Ltd.', '0011FF': 'Digitro Tecnologia Ltda', '001B94': 'T.E.M.A. S.p.A.', 'F0F002': 'Hon Hai Precision Ind. Co.,Ltd.', 'C0CB38': 'Hon Hai Precision Ind. Co.,Ltd.', 'F07BCB': 'Hon Hai Precision Ind. 
Co.,Ltd.', '50B7C3': 'Samsung Electronics Co.,Ltd', '1C5A3E': 'Samsung Electronics Co.,Ltd', 'A02195': 'Samsung Electronics Co.,Ltd', 'B07870': 'Wi-NEXT, Inc.', 'E47CF9': 'Samsung Electronics Co.,Ltd', '4844F7': 'Samsung Electronics Co.,Ltd', '001377': 'Samsung Electronics Co.,Ltd', '002454': 'Samsung Electronics Co.,Ltd', 'E81132': 'Samsung Electronics Co.,Ltd', 'C06599': 'Samsung Electronics Co.,Ltd', 'BC79AD': 'Samsung Electronics Co.,Ltd', '4C3C16': 'Samsung Electronics Co.,Ltd', '0073E0': 'Samsung Electronics Co.,Ltd', '0017D5': 'Samsung Electronics Co.,Ltd', '001E7D': 'Samsung Electronics Co.,Ltd', '001DF6': 'Samsung Electronics Co.,Ltd', 'F008F1': 'Samsung Electronics Co.,Ltd', '58C38B': 'Samsung Electronics Co.,Ltd', '00E3B2': 'Samsung Electronics Co.,Ltd', '301966': 'Samsung Electronics Co.,Ltd', 'F0E77E': 'Samsung Electronics Co.,Ltd', '94350A': 'Samsung Electronics Co.,Ltd', '001D25': 'Samsung Electronics Co.,Ltd', 'E4C1F1': 'SHENZHEN SPOTMAU INFORMATION TECHNOLIGY CO., Ltd ', '240AC4': 'Espressif Inc.', '343111': 'Samsung Electronics Co.,Ltd', '08FD0E': 'Samsung Electronics Co.,Ltd', '041BBA': 'Samsung Electronics Co.,Ltd', '889B39': 'Samsung Electronics Co.,Ltd', 'E432CB': 'Samsung Electronics Co.,Ltd', 'BC8CCD': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'D022BE': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'EC9BF3': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'F409D8': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '10D542': 'Samsung Electronics Co.,Ltd', 'A0821F': 'Samsung Electronics Co.,Ltd', 'F06BCA': 'Samsung Electronics Co.,Ltd', 'AC3613': 'Samsung Electronics Co.,Ltd', '002611': 'Licera AB', '005094': 'ARRIS Group, Inc.', 'E0B7B1': 'ARRIS Group, Inc.', 'D82522': 'ARRIS Group, Inc.', 'F0038C': 'AzureWave Technology Inc.', '18D276': 'HUAWEI TECHNOLOGIES CO.,LTD', '005218': 'Wuxi Keboda Electron Co.Ltd', '001E81': 'CNB Technology Inc.', '7CA97D': 'Objenious', 'A8A648': 'Qingdao Hisense Communications Co.,Ltd.', '985DAD': 'Texas Instruments', 'D43639': 'Texas Instruments', 'BC282C': 'e-Smart Systems Pvt. 
Ltd', 'A40DBC': 'Xiamen Intretech Inc.', '84EF18': 'Intel Corporate', '84C1C1': 'Juniper Networks', 'A81B6A': 'Texas Instruments', '343DC4': 'BUFFALO.INC', 'B0F893': 'Shanghai MXCHIP Information Technology Co., Ltd.', 'C411E0': 'Bull Group Co., Ltd', '28C87A': 'ARRIS Group, Inc.', '48FD8E': 'HUAWEI TECHNOLOGIES CO.,LTD', '7C0623': 'Ultra Electronics Sonar System Division', '28AC67': 'Mach Power, Rappresentanze Internazionali s.r.l.', '14825B': 'Hefei Radio Communication Technology Co., Ltd ', '641269': 'ARRIS Group, Inc.', '0002C9': 'Mellanox Technologies, Inc.', '080051': 'ExperData', '00126C': 'Visonic Technologies 1993 Ltd.', 'AC6175': 'HUAWEI TECHNOLOGIES CO.,LTD', '244427': 'HUAWEI TECHNOLOGIES CO.,LTD', '0080C7': 'XIRCOM', '000138': 'XAVi Technologies Corp.', '00166D': 'Yulong Computer Telecommunication Scientific (Shenzhen) Co.,Ltd', '3C9157': 'Yulong Computer Telecommunication Scientific (Shenzhen) Co.,Ltd', '0000D8': 'Novell, Inc.', '001F46': 'Nortel Networks', '003093': 'Sonnet Technologies, Inc', '00034B': 'Nortel Networks', '002561': 'ProCurve Networking by HP', '008058': 'PRINTER SYSTEMS CORP.', '00157D': 'POSDATA', '4849C7': 'Samsung Electronics Co.,Ltd', '849866': 'Samsung Electronics Co.,Ltd', '001C9C': 'Nortel Networks', '001B25': 'Nortel Networks', '0019E1': 'Nortel Networks', '001D42': 'Nortel Networks', '00140D': 'Nortel Networks', '000E40': 'Nortel Networks', 'FCB0C4': 'Shanghai DareGlobal Technologies Co.,Ltd', 'A89DD2': 'Shanghai DareGlobal Technologies Co.,Ltd', '00E00F': 'Shanghai Baud Data Communication Co.,Ltd.', '28BE03': 'TCT mobile ltd', '903AE6': 'PARROT SA', 'A098ED': 'Shandong Intelligent Optical Communication Development Co., Ltd.', '000EF4': 'Kasda Networks Inc', '00167A': 'Skyworth Overseas Development Ltd.', 'A42940': 'Shenzhen YOUHUA Technology Co., Ltd', 'E4A387': 'Control Solutions LLC', '1880F5': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '10E878': 'Nokia', '10F96F': 'LG Electronics (Mobile Communications)', 'C4438F': 'LG Electronics (Mobile Communications)', 'A09169': 'LG Electronics (Mobile Communications)', '286C07': 'XIAOMI Electronics,CO.,LTD', '002280': 'A2B Electronics AB', '404AD4': 'Widex A/S', '9893CC': 'LG ELECTRONICS INC', '3CCD93': 'LG ELECTRONICS INC', '2021A5': 'LG Electronics (Mobile Communications)', '6CD68A': 'LG Electronics (Mobile Communications)', 'CC79CF': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', '001925': 'Intelicis Corporation', '9476B7': 'Samsung Electronics Co.,Ltd', '2C54CF': 'LG Electronics (Mobile Communications)', '485929': 'LG Electronics (Mobile Communications)', '58A2B5': 'LG Electronics (Mobile Communications)', '002483': 'LG Electronics (Mobile Communications)', '001FE3': 'LG Electronics (Mobile Communications)', 'F0421C': 'Intel Corporate', '000F62': 'Alcatel Bell Space N.V.', '001CD8': 'BlueAnt Wireless', '0019AB': 'Raycom CO ., LTD', '4C334E': 'HIGHTECH', '7C70BC': 'IEEE Registration Authority', 'E81863': 'IEEE Registration Authority', '2CD141': 'IEEE Registration Authority', '3C39E7': 'IEEE Registration Authority', 'BC6641': 'IEEE Registration Authority', '80E4DA': 'IEEE Registration Authority', '885D90': 'IEEE Registration Authority', 'C88ED1': 'IEEE Registration Authority', 'B01F81': 'IEEE Registration Authority', '549A11': 'IEEE Registration Authority', 'B8D812': 'IEEE Registration Authority', '1C21D1': 'IEEE Registration Authority', '283638': 'IEEE Registration Authority', 'F485C6': 'FDT Technologies', '60EB69': 'QUANTA COMPUTER INC.', 'C80AA9': 'QUANTA COMPUTER INC.', 'D404FF': 'Juniper Networks', 'C8755B': 
'Quantify Technology Pty. Ltd.', '001B24': 'QUANTA COMPUTER INC.', '00C09F': 'QUANTA COMPUTER INC.', 'C45444': 'QUANTA COMPUTER INC.', '00269E': 'QUANTA COMPUTER INC.', '88124E': 'Qualcomm Inc.', '001428': 'Vocollect Inc', '006B9E': 'Vizio, Inc', '4C6641': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '5CA86A': 'HUAWEI TECHNOLOGIES CO.,LTD', '001B32': 'QLogic Corporation', '0017CA': 'Qisda Corporation', '70F395': 'Universal Global Scientific Industrial Co., Ltd.', '48F7C0': 'Technicolor CH USA Inc.', '0015B7': 'Toshiba', 'E89D87': 'Toshiba', 'E09579': 'ORTHOsoft inc, d/b/a Zimmer CAS', 'A0ADA1': 'JMR Electronics, Inc', 'BCC00F': 'Fiberhome Telecommunication Technologies Co.,LTD', '9CA5C0': 'vivo Mobile Communication Co., Ltd.', '80C6AB': 'Technicolor CH USA Inc.', '90A4DE': 'Wistron Neweb Corporation', '70E284': 'Wistron Infocomm (Zhongshan) Corporation', 'A854B2': 'Wistron Neweb Corporation', '3044A1': 'Shanghai Nanchao Information Technology', 'CC03FA': 'Technicolor CH USA Inc.', 'E0ACF1': 'Cisco Systems, Inc', '00015B': 'ITALTEL S.p.A/RF-UP-I', '00A0A8': 'RENEX CORPORATION', '00C0AB': 'Telco Systems, Inc. ', '0023F8': 'Zyxel Communications Corporation', '0019CB': 'Zyxel Communications Corporation', '2C094D': 'Raptor Engineering, LLC', 'AC3743': 'HTC Corporation', '001D7E': 'Cisco-Linksys, LLC', 'E4FB8F': 'MOBIWIRE MOBILES (NINGBO) CO.,LTD', '10BD55': 'Q-Lab Corporation', 'C449BB': 'MITSUMI ELECTRIC CO.,LTD.', 'FC2D5E': 'zte corporation', 'B40418': 'Smartchip Integrated Inc.', '90CF7D': 'Qingdao Hisense Communications Co.,Ltd.', 'F40A4A': 'INDUSNET Communication Technology Co.,LTD', 'F85A00': 'Sanford LP', 'FC55DC': 'Baltic Latvian Universal Electronics LLC', '08010F': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '941882': 'Hewlett Packard Enterprise', '6038E0': 'Belkin International Inc.', '8850DD': 'Infiniband Trade Association ', '002550': 'Riverbed Technology, Inc.', 'D0B2C4': 'Technicolor CH USA Inc.', '50AB3E': 'Qibixx AG', '3876CA': 'Shenzhen Smart Intelligent Technology Co.Ltd', '042758': 'HUAWEI TECHNOLOGIES CO.,LTD', '9CE374': 'HUAWEI TECHNOLOGIES CO.,LTD', '8CD2E9': 'YOKOTE SEIKO CO., LTD.', 'B8BBAF': 'Samsung Electronics Co.,Ltd', '60C5AD': 'Samsung Electronics Co.,Ltd', '442C05': 'AMPAK Technology, Inc.', '8C897A': 'AUGTEK', 'F845AD': 'Konka Group Co., Ltd.', '000FE2': 'Hangzhou H3C Technologies Co., Limited', '80F62E': 'Hangzhou H3C Technologies Co., Limited', '608D17': 'Sentrus Government Systems Division, Inc', 'ECADB8': 'Apple, Inc.', '9801A7': 'Apple, Inc.', '6879ED': 'SHARP Corporation', '002382': 'Lih Rong electronic Enterprise Co., Ltd.', '24F094': 'Apple, Inc.', '086D41': 'Apple, Inc.', 'B4D5BD': 'Intel Corporate', '98AA3C': 'Will i-tech Co., Ltd.', 'BCAD28': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', 'F4911E': 'ZHUHAI EWPE INFORMATION TECHNOLOGY INC', '002552': 'VXi Corporation', '6CE983': 'Gastron Co., LTD.', '28E31F': 'Xiaomi Communications Co Ltd', '989096': 'Dell Inc.', 'DC3752': 'GE', 'DCD916': 'HUAWEI TECHNOLOGIES CO.,LTD', '00022E': 'TEAC Corp. 
R& D', '0060B0': 'Hewlett Packard', '7C738B': 'Cocoon Alarm Ltd', 'F80F84': 'Natural Security SAS', '44A42D': 'TCT mobile ltd', '70F96D': 'Hangzhou H3C Technologies Co., Limited', 'BC6A44': 'Commend International GmbH', 'F0EE58': 'PACE Telematics GmbH', '4CA003': 'T-21 Technologies LLC', '083FBC': 'zte corporation', '00C0F0': 'Kingston Technology Company, Inc.', '943BB1': 'Kaonmedia CO., LTD.', '0018D7': 'JAVAD GNSS, Inc.', '001F09': 'Jastec', 'AC620D': 'Jabil Circuit(Wuxi) Co.,Ltd', '08000D': 'International Computers, Ltd', '1C7370': 'Neotech', '30E37A': 'Intel Corporate', '0000C9': 'Emulex Corporation', '0040AA': 'Valmet Automation', 'D0B0CD': 'Moen', '7050AF': 'BSkyB Ltd', 'F4EF9E': 'SGSG SCIENCE & TECHNOLOGY CO. LTD', '1C740D': 'Zyxel Communications Corporation', '603ECA': 'Cambridge Medical Robotics Ltd', '001F1F': 'Edimax Technology Co. Ltd.', '00020E': 'ECI Telecom Ltd.', '200A5E': 'Xiangshan Giant Eagle Technology Developing Co., Ltd.', '9C741A': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E4A8B6': 'HUAWEI TECHNOLOGIES CO.,LTD', '244C07': 'HUAWEI TECHNOLOGIES CO.,LTD', '746FF7': 'Wistron Neweb Corporation', 'B8AEED': 'Elitegroup Computer Systems Co.,Ltd.', '000DB0': 'Olym-tech Co.,Ltd.', '30F6B9': 'Ecocentric Energy', '847BEB': 'Dell Inc.', '54511B': 'HUAWEI TECHNOLOGIES CO.,LTD', '68536C': 'SPnS Co.,Ltd', '1CEA1B': 'Nokia', 'D4612E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B0E2E5': 'Fiberhome Telecommunication Technologies Co.,LTD', '40476A': 'AG Acquisition Corp. d.b.a. ASTRO Gaming', '001FA7': 'Sony Interactive Entertainment Inc.', '9046A2': 'Tedipay UK Ltd', '6479A7': 'Phison Electronics Corp.', 'CCB11A': 'Samsung Electronics Co.,Ltd', '703C03': 'RadiAnt Co.,Ltd', '00C164': 'Cisco Systems, Inc', 'DC2DCB': 'Beijing Unis HengYue Technology Co., Ltd.', '2C9662': 'Invenit BV', 'CCD3E2': 'Jiangsu Yinhe Electronics Co.,Ltd.', 'E4FAED': 'Samsung Electronics Co.,Ltd', '288335': 'Samsung Electronics Co.,Ltd', 'DCCF96': 'Samsung Electronics Co.,Ltd', 'AC44F2': 'YAMAHA CORPORATION', '1C5F2B': 'D-Link International', '1C98EC': 'Hewlett Packard Enterprise', '70661B': 'Sonova AG', 'B07FB9': 'NETGEAR', '047E4A': 'moobox CO., Ltd.', '0080E5': 'NetApp', '9C5C8E': 'ASUSTek COMPUTER INC.', 'C88722': 'Lumenpulse', '84683E': 'Intel Corporate', 'E0CDFD': 'Beijing E3Control Technology Co, LTD', '000895': 'DIRC Technologie GmbH & Co.KG', '60ACC8': 'KunTeng Inc.', 'CCB3AB': 'shenzhen Biocare Bio-Medical Equipment Co.,Ltd.', 'E4B318': 'Intel Corporate', '743E2B': 'Ruckus Wireless', 'E0C767': 'Apple, Inc.', '80ED2C': 'Apple, Inc.', 'F03404': 'TCT mobile ltd', '80D160': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '30785C': 'Partow Tamas Novin (Parman)', '246968': 'TP-LINK TECHNOLOGIES CO.,LTD.', '8CA2FD': 'Starry, Inc.', '84BA3B': 'CANON INC.', '000585': 'Juniper Networks', '204E71': 'Juniper Networks', '00194F': 'Nokia Danmark A/S', '00BD3A': 'Nokia Corporation', '80501B': 'Nokia Corporation', 'A04E04': 'Nokia Corporation', '001262': 'Nokia Danmark A/S', '0014A7': 'Nokia Danmark A/S', '0015A0': 'Nokia Danmark A/S', '0016BC': 'Nokia Danmark A/S', '00174B': 'Nokia Danmark A/S', '002669': 'Nokia Danmark A/S', 'AC61EA': 'Apple, Inc.', '38B54D': 'Apple, Inc.', '1C5CF2': 'Apple, Inc.', 'A87E33': 'Nokia Danmark A/S', '002403': 'Nokia Danmark A/S', '002404': 'Nokia Danmark A/S', '0019B7': 'Nokia Danmark A/S', '0017B0': 'Nokia Danmark A/S', '002109': 'Nokia Danmark A/S', '002108': 'Nokia Danmark A/S', '001B33': 'Nokia Danmark A/S', '0015DE': 'Nokia Danmark A/S', '0002EE': 'Nokia Danmark A/S', 'D8F710': 'Libre Wireless Technologies Inc.', '3C591E': 'TCL King Electrical Appliances (Huizhou) Co., Ltd', 'C43655': 'Shenzhen Fenglian Technology Co., Ltd.', 'E0B9E5': 'Technicolor', '0030DA': 'Comtrend Corporation', '64680C': 'Comtrend Corporation', '3872C0': 'Comtrend Corporation', 'A80600': 'Samsung Electronics Co.,Ltd', '002682': 'Gemtek Technology Co., Ltd.', '0009E1': 'Gemtek Technology Co., Ltd.', '14C126': 'Nokia Corporation', '600194': 'Espressif Inc.', 'F05A09': 'Samsung Electronics Co.,Ltd', '503275': 'Samsung Electronics Co.,Ltd', '08FC88': 'Samsung Electronics Co.,Ltd', '0270B3': 'DATA RECALL LTD.', '000136': 'CyberTAN Technology Inc.', 'D04D2C': 'Roku, Inc.', 'B0A737': 'Roku, Inc.', '140C76': 'FREEBOX SAS', '001BE9': 'Broadcom', '0019C7': 'Cambridge Industries(Group) Co.,Ltd.', '70D931': 'Cambridge Industries(Group) Co.,Ltd.', '029D8E': 'CARDIAC RECORDERS, INC.', '00402A': 'Canoga Perkins Corporation', 'A4C7DE': 'Cambridge Industries(Group) Co.,Ltd.', 'D8B8F6': 'Nantworks', '008077': 'Brother industries, LTD.', '24F5AA': 'Samsung Electronics Co.,Ltd', '988389': 'Samsung Electronics Co.,Ltd', '84A466': 'Samsung Electronics Co.,Ltd', 'C4576E': 'Samsung Electronics Co.,Ltd', '508569': 'Samsung Electronics Co.,Ltd', '0060BB': 'Cabletron Systems, Inc.', 'F8D0BD': 'Samsung Electronics Co.,Ltd', '78595E': 'Samsung Electronics Co.,Ltd', '0C1420': 'Samsung Electronics Co.,Ltd', '94B10A': 'Samsung Electronics Co.,Ltd', '3CBBFD': 'Samsung Electronics Co.,Ltd', 'A48431': 'Samsung Electronics Co.,Ltd', 'A0B4A5': 'Samsung Electronics Co.,Ltd', 'E4F8EF': 'Samsung Electronics Co.,Ltd', 'DC446D': 'Allwinner Technology Co., Ltd', '745AAA': 'HUAWEI TECHNOLOGIES CO.,LTD', '04FE8D': 'HUAWEI TECHNOLOGIES CO.,LTD', '001333': 'BaudTec Corporation', '58671A': 'Barnes&Noble', '002675': 'Aztech Electronics Pte Ltd', '0024FE': 'AVM GmbH', 'C02506': 'AVM GmbH', '405D82': 'NETGEAR', 'DCEF09': 'NETGEAR', 'DC64B8': 'Shenzhen JingHanDa Electronics Co.Ltd', '000D92': 'ARIMA Communications Corp.', '002163': 'ASKEY COMPUTER CORP', 'A8D3F7': 'Arcadyan Technology Corporation', '4C60DE': 'NETGEAR', 'C43DC7': 'NETGEAR', '489D24': 'BlackBerry RTS', '08BD43': 'NETGEAR', '44EE02': 'MTI Ltd.', '5856E8': 'ARRIS Group, Inc.', 'F80BBE': 'ARRIS Group, Inc.', 'DC4517': 'ARRIS Group, Inc.', 'C8AA21': 'ARRIS Group, Inc.', '0017EE': 'ARRIS Group, Inc.', '00111A': 'ARRIS Group, Inc.', '000F9F': 'ARRIS Group, Inc.', '0004BD': 'ARRIS Group, Inc.', '002642': 'ARRIS Group, Inc.', '0024A1': 'ARRIS Group, Inc.', '002210': 'ARRIS Group, Inc.', '0022B4': 'ARRIS Group, Inc.', '00149A': 'ARRIS Group, Inc.', '0014E8': 'ARRIS Group, Inc.', '0019C0': 'ARRIS 
Group, Inc.', '00E06F': 'ARRIS Group, Inc.', '8096B1': 'ARRIS Group, Inc.', '707E43': 'ARRIS Group, Inc.', '00152F': 'ARRIS Group, Inc.', '001FC4': 'ARRIS Group, Inc.', '001CFB': 'ARRIS Group, Inc.', '002395': 'ARRIS Group, Inc.', '0023AF': 'ARRIS Group, Inc.', 'F87B7A': 'ARRIS Group, Inc.', '0000F4': 'Allied Telesis, Inc.', '001577': 'Allied Telesis, Inc.', '001AEB': 'Allied Telesis R&D Center K.K.', '703C39': 'SEAWING Kft', '9097D5': 'Espressif Inc.', 'ACD074': 'Espressif Inc.', '38E3C5': 'Taicang T&W Electronics', '0015CE': 'ARRIS Group, Inc.', '0015A2': 'ARRIS Group, Inc.', '0015A3': 'ARRIS Group, Inc.', '0015A4': 'ARRIS Group, Inc.', '0000CA': 'ARRIS Group, Inc.', '709E29': 'Sony Interactive Entertainment Inc.', 'A4DB30': 'Liteon Technology Corporation', '40F02F': 'Liteon Technology Corporation', '74C246': 'Amazon Technologies Inc.', '000FA3': 'Alpha Networks Inc.', '001D6A': 'Alpha Networks Inc.', '002345': 'Sony Mobile Communications Inc', '6C0E0D': 'Sony Mobile Communications Inc', '6C23B9': 'Sony Mobile Communications Inc', '3017C8': 'Sony Mobile Communications Inc', '0012EE': 'Sony Mobile Communications Inc', '001620': 'Sony Mobile Communications Inc', '001963': 'Sony Mobile Communications Inc', '001FE4': 'Sony Mobile Communications Inc', '205476': 'Sony Mobile Communications Inc', '001A80': 'Sony Corporation', '8841FC': 'AirTies Wireless Networks', '0030D3': 'Agilent Technologies, Inc.', '00A02F': 'ADB Broadband Italia', '98743D': 'Shenzhen Jun Kai Hengye Technology Co. Ltd', 'A0F459': 'FN-LINK TECHNOLOGY LIMITED', '586356': 'FN-LINK TECHNOLOGY LIMITED', '8CB864': 'AcSiP Technology Corp.', '5CE2F4': 'AcSiP Technology Corp.', 'B8616F': 'Accton Technology Corp', '0012CF': 'Accton Technology Corp', '0030F1': 'Accton Technology Corp', '705A0F': 'Hewlett Packard', '4495FA': 'Qingdao Santong Digital Technology Co.Ltd', '0025D3': 'AzureWave Technology Inc.', '1C4BD6': 'AzureWave Technology Inc.', '08A95A': 'AzureWave Technology Inc.', '94DBC9': 'AzureWave Technology Inc.', '240A64': 'AzureWave Technology Inc.', '40E230': 'AzureWave Technology Inc.', '80D21D': 'AzureWave Technology Inc.', 'D4D184': 'ADB Broadband Italia', 'A04FD4': 'ADB Broadband Italia', 'D00ED9': 'Taicang T&W Electronics', '541473': ' Wingtech Group (HongKong)Limited', '8086F2': 'Intel Corporate', 'E09467': 'Intel Corporate', '08D40C': 'Intel Corporate', '6C8814': 'Intel Corporate', '303A64': 'Intel Corporate', 'ACFDCE': 'Intel Corporate', '7CCCB8': 'Intel Corporate', 'F40669': 'Intel Corporate', '001DE1': 'Intel Corporate', '90E2BA': 'Intel Corporate', '0026C7': 'Intel Corporate', '0026C6': 'Intel Corporate', '0CCC26': 'Airenetworks', 'E09D31': 'Intel Corporate', '88532E': 'Intel Corporate', '74C99A': 'Ericsson AB', '5CC213': 'Fr. 
Sauter AG', '28101B': 'MagnaCom', '001676': 'Intel Corporate', '0016EA': 'Intel Corporate', '001B77': 'Intel Corporate', '001CC0': 'Intel Corporate', '104A7D': 'Intel Corporate', '001AA0': 'Dell Inc.', '0019B9': 'Dell Inc.', '00B0D0': 'Dell Inc.', '00C04F': 'Dell Inc.', 'B07994': 'Motorola Mobility LLC, a Lenovo Company', 'A470D6': 'Motorola Mobility LLC, a Lenovo Company', '74867A': 'Dell Inc.', '180373': 'Dell Inc.', '14FEB5': 'Dell Inc.', '782BCB': 'Dell Inc.', '001F3B': 'Intel Corporate', '00215D': 'Intel Corporate', '00216A': 'Intel Corporate', '001C23': 'Dell Inc.', 'A4BADB': 'Dell Inc.', '002564': 'Dell Inc.', 'A41F72': 'Dell Inc.', 'C46699': 'vivo Mobile Communication Co., Ltd.', 'C8F230': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '8C0EE3': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'C8CD72': 'Sagemcom Broadband SAS', 'F82C18': '2Wire Inc', '18017D': 'Harbin Arteor technology co., LTD', '001556': 'Sagemcom Broadband SAS', 'C0D044': 'Sagemcom Broadband SAS', 'A01B29': 'Sagemcom Broadband SAS', '74E14A': 'IEEE Registration Authority', '0CEFAF': 'IEEE Registration Authority', 'F80278': 'IEEE Registration Authority', 'A0BB3E': 'IEEE Registration Authority', '884AEA': 'Texas Instruments', '0022A4': '2Wire Inc', '982CBE': '2Wire Inc', '640F28': '2Wire Inc', '00123F': 'Dell Inc.', '000BDB': 'Dell Inc.', '204747': 'Dell Inc.', '001495': '2Wire Inc', '348AAE': 'Sagemcom Broadband SAS', '7C03D8': 'Sagemcom Broadband SAS', 'C0AC54': 'Sagemcom Broadband SAS', '2C3996': 'Sagemcom Broadband SAS', 'F08261': 'Sagemcom Broadband SAS', '0030C5': 'CADENCE DESIGN SYSTEMS, INC.', 'D08CB5': 'Texas Instruments', '00182F': 'Texas Instruments', '0017EA': 'Texas Instruments', '0021BA': 'Texas Instruments', 'BC0DA5': 'Texas Instruments', 'CC8CE3': 'Texas Instruments', 'E0D7BA': 'Texas Instruments', '1CE2CC': 'Texas Instruments', '985945': 'Texas Instruments', '944452': 'Belkin International Inc.', 'B0B448': 'Texas Instruments', 'D494A1': 'Texas Instruments', '0014BF': 'Cisco-Linksys, LLC', 'CCB255': 'D-Link International', '28107B': 'D-Link International', 'FC7516': 'D-Link International', '84C9B2': 'D-Link International', 'C8D3A3': 'D-Link International', '3CBB73': 'Shenzhen Xinguodu Technology Co., Ltd.', '0C47C9': 'Amazon Technologies Inc.', '0050BA': 'D-Link Corporation', '00179A': 'D-Link Corporation', '001CF0': 'D-Link Corporation', '001E58': 'D-Link Corporation', '0022B0': 'D-Link Corporation', '002401': 'D-Link Corporation', '1CAFF7': 'D-Link International', '14D64D': 'D-Link International', '9094E4': 'D-Link International', 'B499BA': 'Hewlett Packard', '047863': 'Shanghai MXCHIP Information Technology Co., Ltd.', '409F87': 'Jide Technology (Hong Kong) Limited', '0CF9C0': 'BSkyB Ltd', '4CFF12': 'Fuze Entertainment Co., ltd', 'AC9A22': 'NXP Semiconductors', '287CDB': 'Hefei Toycloud Technology Co.,ltd', '806AB0': 'Shenzhen TINNO Mobile Technology Corp.', '48AD08': 'HUAWEI TECHNOLOGIES CO.,LTD', '4CFB45': 'HUAWEI TECHNOLOGIES CO.,LTD', '009ACD': 'HUAWEI TECHNOLOGIES CO.,LTD', '3C5AB4': 'Google, Inc.', 'F4F5E8': 'Google, Inc.', '94EB2C': 'Google, Inc.', '0CC731': 'Currant, Inc.', '70B3D5': 'IEEE Registration Authority', '28ED6A': 'Apple, Inc.', 'C056E3': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '001977': 'Aerohive Networks Inc.', '08EA44': 'Aerohive Networks Inc.', 'EC3EF7': 'Juniper Networks', '0CF893': 'ARRIS Group, Inc.', '3CDFA9': 'ARRIS Group, Inc.', '8461A0': 'ARRIS Group, Inc.', '0015D1': 'ARRIS Group, Inc.', '001DD0': 'ARRIS Group, Inc.', '001DD3': 'ARRIS Group, 
Inc.', 'ACB313': 'ARRIS Group, Inc.', '38F23E': 'Microsoft Mobile Oy', 'E4F89C': 'Intel Corporate', '6CA100': 'Intel Corporate', '2C4138': 'Hewlett Packard', '441EA1': 'Hewlett Packard', '78E7D1': 'Hewlett Packard', '5C8FE0': 'ARRIS Group, Inc.', '90C792': 'ARRIS Group, Inc.', 'BCCAB5': 'ARRIS Group, Inc.', 'D039B3': 'ARRIS Group, Inc.', '000FCC': 'ARRIS Group, Inc.', '0012F0': 'Intel Corporate', '000740': 'BUFFALO.INC', '0024A5': 'BUFFALO.INC', 'CCE1D5': 'BUFFALO.INC', 'A402B9': 'Intel Corporate', 'DC5360': 'Intel Corporate', '001CC4': 'Hewlett Packard', '001E0B': 'Hewlett Packard', '002264': 'Hewlett Packard', '0025B3': 'Hewlett Packard', '000CF1': 'Intel Corporation', '784859': 'Hewlett Packard', '58DC6D': 'Exceptional Innovation, Inc.', '902155': 'HTC Corporation', '643150': 'Hewlett Packard', '7CB15D': 'HUAWEI TECHNOLOGIES CO.,LTD', '00265E': 'Hon Hai Precision Ind. Co.,Ltd.', '00242C': 'Hon Hai Precision Ind. Co.,Ltd.', 'D8B377': 'HTC Corporation', 'B0F1A3': 'Fengfan (BeiJing) Technology Co., Ltd. ', '3464A9': 'Hewlett Packard', '3863BB': 'Hewlett Packard', '5CB901': 'Hewlett Packard', 'DC4A3E': 'Hewlett Packard', 'B05ADA': 'Hewlett Packard', '001083': 'Hewlett Packard', 'A0B3CC': 'Hewlett Packard', 'ECB1D7': 'Hewlett Packard', '9CB654': 'Hewlett Packard', '6C3BE5': 'Hewlett Packard', 'B0AA36': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '784B87': 'Murata Manufacturing Co., Ltd.', 'E4CE02': 'WyreStorm Technologies Ltd', '40F308': 'Murata Manufacturing Co., Ltd.', '808917': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'A09347': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'E8BBA8': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '942CB3': 'HUMAX Co., Ltd.', '002719': 'TP-LINK TECHNOLOGIES CO.,LTD.', '40169F': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F4EC38': 'TP-LINK TECHNOLOGIES CO.,LTD.', '140467': 'SNK Technologies Co.,Ltd.', '8030DC': 'Texas Instruments', 'A4D578': 'Texas Instruments', '14CF92': 'TP-LINK TECHNOLOGIES CO.,LTD.', '20DCE6': 'TP-LINK TECHNOLOGIES CO.,LTD.', '14CC20': 'TP-LINK TECHNOLOGIES CO.,LTD.', '246081': 'razberi technologies', 'A4D18C': 'Apple, Inc.', '90F652': 'TP-LINK TECHNOLOGIES CO.,LTD.', '241EEB': 'Apple, Inc.', 'CC25EF': 'Apple, Inc.', '88CF98': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F80D43': 'Hon Hai Precision Ind. Co.,Ltd.', '38F889': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D07AB5': 'HUAWEI TECHNOLOGIES CO.,LTD', '0019C6': 'zte corporation', '001DD9': 'Hon Hai Precision Ind. Co.,Ltd.', '00197D': 'Hon Hai Precision Ind. Co.,Ltd.', '7C4CA5': 'BSkyB Ltd', 'CC4E24': 'Brocade Communications Systems, Inc.', '00E052': 'Brocade Communications Systems, Inc.', '0014A4': 'Hon Hai Precision Ind. Co.,Ltd.', '78DD08': 'Hon Hai Precision Ind. Co.,Ltd.', '9CD21E': 'Hon Hai Precision Ind. Co.,Ltd.', 'B43052': 'HUAWEI TECHNOLOGIES CO.,LTD', '80D09B': 'HUAWEI TECHNOLOGIES CO.,LTD', '1C8E5C': 'HUAWEI TECHNOLOGIES CO.,LTD', '84742A': 'zte corporation', '9CD24B': 'zte corporation', 'C87B5B': 'zte corporation', '0007D8': 'Hitron Technologies. 
Inc', 'C03E0F': 'BSkyB Ltd', '904E2B': 'HUAWEI TECHNOLOGIES CO.,LTD', '2008ED': 'HUAWEI TECHNOLOGIES CO.,LTD', '00010F': 'Brocade Communications Systems, Inc.', '080088': 'Brocade Communications Systems, Inc.', '0034FE': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C85195': 'HUAWEI TECHNOLOGIES CO.,LTD', '40CBA8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D46E5C': 'HUAWEI TECHNOLOGIES CO.,LTD', '8853D4': 'HUAWEI TECHNOLOGIES CO.,LTD', '04C06F': 'HUAWEI TECHNOLOGIES CO.,LTD', '202BC1': 'HUAWEI TECHNOLOGIES CO.,LTD', '54A51B': 'HUAWEI TECHNOLOGIES CO.,LTD', '002568': 'HUAWEI TECHNOLOGIES CO.,LTD', '781DBA': 'HUAWEI TECHNOLOGIES CO.,LTD', '00259E': 'HUAWEI TECHNOLOGIES CO.,LTD', '006B8E': 'Shanghai Feixun Communication Co.,Ltd.', 'D46AA8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BCADAB': 'Avaya Inc', 'FCA841': 'Avaya Inc', '3CB15B': 'Avaya Inc', '3475C7': 'Avaya Inc', 'B0ADAA': 'Avaya Inc', 'B4475E': 'Avaya Inc', 'B8BC1B': 'HUAWEI TECHNOLOGIES CO.,LTD', '582AF7': 'HUAWEI TECHNOLOGIES CO.,LTD', '24D921': 'Avaya Inc', '848371': 'Avaya Inc', '001B4F': 'Avaya Inc', '4C8BEF': 'HUAWEI TECHNOLOGIES CO.,LTD', '5006AB': 'Cisco Systems, Inc', 'FC48EF': 'HUAWEI TECHNOLOGIES CO.,LTD', '707BE8': 'HUAWEI TECHNOLOGIES CO.,LTD', '4C1FCC': 'HUAWEI TECHNOLOGIES CO.,LTD', '00906D': 'Cisco Systems, Inc', '0090AB': 'Cisco Systems, Inc', '005054': 'Cisco Systems, Inc', '00500B': 'Cisco Systems, Inc', '0003DD': 'Comark Interactive Solutions', '005053': 'Cisco Systems, Inc', '005050': 'Cisco Systems, Inc', 'D4B110': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E468A3': 'HUAWEI TECHNOLOGIES CO.,LTD', '247189': 'Texas Instruments', '987BF3': 'Texas Instruments', 'A0F6FD': 'Texas Instruments', 'D0B5C2': 'Texas Instruments', '78A504': 'Texas Instruments', '6CECEB': 'Texas Instruments', '3400A3': 'HUAWEI TECHNOLOGIES CO.,LTD', '00902B': 'Cisco Systems, Inc', '00E08F': 'Cisco Systems, Inc', '001868': 'Cisco SPVTG', '887556': 'Cisco Systems, Inc', 'FC9947': 'Cisco Systems, Inc', '6C2056': 'Cisco Systems, Inc', '0015F2': 'ASUSTek COMPUTER INC.', '90E6BA': 'ASUSTek COMPUTER INC.', '002618': 'ASUSTek COMPUTER INC.', 'F46D04': 'ASUSTek COMPUTER INC.', '00E018': 'ASUSTek COMPUTER INC.', '000C6E': 'ASUSTek COMPUTER INC.', '000EA6': 'ASUSTek COMPUTER INC.', '001D60': 'ASUSTek COMPUTER INC.', '6C416A': 'Cisco Systems, Inc', 'ECE1A9': 'Cisco Systems, Inc', 'C067AF': 'Cisco Systems, Inc', 'ACF2C5': 'Cisco Systems, Inc', '2401C7': 'Cisco Systems, Inc', '6886A7': 'Cisco Systems, Inc', 'C0255C': 'Cisco Systems, Inc', '3C0E23': 'Cisco Systems, Inc', '08CC68': 'Cisco Systems, Inc', '0C2724': 'Cisco Systems, Inc', '6CFA89': 'Cisco Systems, Inc', '00E0A3': 'Cisco Systems, Inc', '602AD0': 'Cisco SPVTG', '745E1C': 'PIONEER CORPORATION', '046273': 'Cisco Systems, Inc', 'D8B190': 'Cisco Systems, Inc', '80E86F': 'Cisco Systems, Inc', 'AC7E8A': 'Cisco Systems, Inc', '1CE85D': 'Cisco Systems, Inc', 'A89D21': 'Cisco Systems, Inc', '689CE2': 'Cisco Systems, Inc', '0023BE': 'Cisco SPVTG', '185933': 'Cisco SPVTG', '445829': 'Cisco SPVTG', 'F44E05': 'Cisco Systems, Inc', '881DFC': 'Cisco Systems, Inc', '001011': 'Cisco Systems, Inc', '00000C': 'Cisco Systems, Inc', '28CFE9': 'Apple, Inc.', '00A040': 'Apple, Inc.', 'A0F849': 'Cisco Systems, Inc', '3CD0F8': 'Apple, Inc.', '680927': 'Apple, Inc.', '6CC26B': 'Apple, Inc.', '44D884': 'Apple, Inc.', '002608': 'Apple, Inc.', '0026B0': 'Apple, Inc.', '0026BB': 'Apple, Inc.', 'D49A20': 'Apple, Inc.', 'F81EDF': 'Apple, Inc.', 'C82A14': 'Apple, Inc.', '3C0754': 'Apple, Inc.', 'A4B197': 'Apple, Inc.', 'F0B479': 'Apple, Inc.', '1093E9': 'Apple, Inc.', 
'442A60': 'Apple, Inc.', 'A4D1D2': 'Apple, Inc.', '28CFDA': 'Apple, Inc.', '003065': 'Apple, Inc.', '001451': 'Apple, Inc.', '001E52': 'Apple, Inc.', '0021E9': 'Apple, Inc.', 'CC08E0': 'Apple, Inc.', '045453': 'Apple, Inc.', 'F4F951': 'Apple, Inc.', 'C06394': 'Apple, Inc.', '18AF8F': 'Apple, Inc.', 'C8B5B7': 'Apple, Inc.', '90B21F': 'Apple, Inc.', '30F7C5': 'Apple, Inc.', '40B395': 'Apple, Inc.', '44FB42': 'Apple, Inc.', 'E88D28': 'Apple, Inc.', '949426': 'Apple, Inc.', '207D74': 'Apple, Inc.', 'F4F15A': 'Apple, Inc.', 'C86F1D': 'Apple, Inc.', '3090AB': 'Apple, Inc.', '8C2DAA': 'Apple, Inc.', '848506': 'Apple, Inc.', '98FE94': 'Apple, Inc.', 'D8004D': 'Apple, Inc.', '64200C': 'Apple, Inc.', 'C8334B': 'Apple, Inc.', '64E682': 'Apple, Inc.', 'B8E856': 'Apple, Inc.', 'D89695': 'Apple, Inc.', '1499E2': 'Apple, Inc.', 'B418D1': 'Apple, Inc.', '9C207B': 'Apple, Inc.', 'B065BD': 'Apple, Inc.', '542696': 'Apple, Inc.', '64A3CB': 'Apple, Inc.', '903C92': 'Apple, Inc.', 'D81D72': 'Apple, Inc.', '341298': 'Apple, Inc.', '70E72C': 'Apple, Inc.', '70ECE4': 'Apple, Inc.', '68AE20': 'Apple, Inc.', 'AC87A3': 'Apple, Inc.', 'D8BB2C': 'Apple, Inc.', 'D04F7E': 'Apple, Inc.', '2078F0': 'Apple, Inc.', 'E0ACCB': 'Apple, Inc.', 'A0999B': 'Apple, Inc.', '24240E': 'Apple, Inc.', 'F0DBF8': 'Apple, Inc.', '48746E': 'Apple, Inc.', '54AE27': 'Apple, Inc.', 'FCE998': 'Apple, Inc.', '0CBC9F': 'Apple, Inc.', '34363B': 'Apple, Inc.', 'D0A637': 'Apple, Inc.', '789F70': 'Apple, Inc.', '9CF387': 'Apple, Inc.', 'A85B78': 'Apple, Inc.', 'C8F650': 'Apple, Inc.', 'A88E24': 'Apple, Inc.', '6C4A39': 'BITA', '847D50': 'Holley Metering Limited', '50D59C': 'Thai Habel Industrial Co., Ltd.', '20896F': 'Fiberhome Telecommunication Technologies Co.,LTD', '3052CB': 'Liteon Technology Corporation', '049645': 'WUXI SKY CHIP INTERCONNECTION TECHNOLOGY CO.,LTD.', '1CCDE5': 'Shanghai Wind Technologies Co.,Ltd', '681401': 'Hon Hai Precision Ind. 
Co.,Ltd.', '50A9DE': 'Smartcom - Bulgaria AD', 'AC1FD7': 'Real Vision Technology Co.,Ltd.', '88C242': 'Poynt Co.', '1402EC': 'Hewlett Packard Enterprise', 'E034E4': 'Feit Electric Company, Inc.', '34C9F0': 'LM Technologies Ltd', '68A828': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CC20E8': 'Apple, Inc.', '48BF74': 'Baicells Technologies Co.,LTD', '6C9354': 'Yaojin Technology (Shenzhen) Co., LTD.', 'DCDC07': 'TRP Systems BV', '3891D5': 'Hangzhou H3C Technologies Co., Limited', '8CE2DA': 'Circle Media Inc', '382187': 'Midea Group Co., Ltd.', '78D6B2': 'Toshiba', 'E8DED6': 'Intrising Networks, Inc.', 'DC82F6': 'iPort', '70480F': 'Apple, Inc.', '3CB72B': 'PLUMgrid Inc', '489A42': 'Technomate Ltd', '20D160': 'Private', 'BC9C31': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D88B4C': 'KingTing Tech.', '384C90': 'ARRIS Group, Inc.', 'F0B0E7': 'Apple, Inc.', '381C23': 'Hilan Technology CO.,LTD', '649A12': 'P2 Mobile Technologies Limited', 'AC8995': 'AzureWave Technology Inc.', '0469F8': 'Apple, Inc.', '3029BE': 'Shanghai MRDcom Co.,Ltd', '20C3A4': 'RetailNext', 'E4C2D1': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D4F4BE': 'Palo Alto Networks', '48B620': 'ROLI Ltd.', 'D888CE': 'RF Technology Pty Ltd', '307CB2': 'ANOV FRANCE', '847973': 'Shanghai Baud Data Communication Co.,Ltd.', 'E8B4C8': 'Samsung Electronics Co.,Ltd', 'D087E2': 'Samsung Electronics Co.,Ltd', 'F05B7B': 'Samsung Electronics Co.,Ltd', 'B047BF': 'Samsung Electronics Co.,Ltd', '7C0BC6': 'Samsung Electronics Co.,Ltd', '7CFE90': 'Mellanox Technologies, Inc.', '845B12': 'HUAWEI TECHNOLOGIES CO.,LTD', '205531': 'Samsung Electronics Co.,Ltd', '10868C': 'ARRIS Group, Inc.', '44FDA3': 'Everysight LTD.', 'A8A795': 'Hon Hai Precision Ind. Co.,Ltd.', '906FA9': 'NANJING PUTIAN TELECOMMUNICATIONS TECHNOLOGY CO.,LTD.', '60FD56': 'WOORISYSTEMS CO., Ltd', '9870E8': 'INNATECH SDN BHD', '44656A': 'Mega Video Electronic(HK) Industry Co., Ltd', '78EB39': 'Instituto Nacional de Tecnología Industrial', '0C756C': 'Anaren Microwave, Inc.', 'D47BB0': 'ASKEY COMPUTER CORP', 'F0224E': 'Esan electronic co.', 'CC5FBF': 'Topwise 3G Communication Co., Ltd.', 'C8F9C8': 'NewSharp Technology(SuZhou)Co,Ltd', '80C5E6': 'Microsoft Corporation', '10DF8B': 'Shenzhen CareDear Communication Technology Co.,Ltd', 'C412F5': 'D-Link International', '5C4527': 'Juniper Networks', 'E02CB2': 'Lenovo Mobile Communication (Wuhan) Company Limited', '606DC7': 'Hon Hai Precision Ind. Co.,Ltd.', '68EDA4': 'Shenzhen Seavo Technology Co.,Ltd', '2CC5D3': 'Ruckus Wireless', 'F4573E': 'Fiberhome Telecommunication Technologies Co.,LTD', 'D0431E': 'Dell Inc.', '54CD10': 'Panasonic Mobile Communications Co.,Ltd.', '408D5C': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', 'FCE1FB': 'Array Networks', 'E89120': 'Motorola Mobility LLC, a Lenovo Company', 'D4F9A1': 'HUAWEI TECHNOLOGIES CO.,LTD', '5440AD': 'Samsung Electronics Co.,Ltd', 'A424DD': 'Cambrionix Ltd', '145A83': 'Logi-D inc', '804E81': 'Samsung Electronics Co.,Ltd', 'C09A71': 'XIAMEN MEITU MOBILE TECHNOLOGY CO.LTD', '64167F': 'Polycom', '340B40': 'MIOS ELETTRONICA SRL', '48066A': 'Tempered Networks, Inc.', 'BCF811': 'Xiamen DNAKE Technology Co.,Ltd', '900A39': 'Wiio, Inc.', 'D0C0BF': 'Actions Microelectronics Co., Ltd', 'E861BE': 'Melec Inc.', '5870C6': 'Shanghai Xiaoyi Technology Co., Ltd.', 'FC8F90': 'Samsung Electronics Co.,Ltd', '74042B': 'Lenovo Mobile Communication (Wuhan) Company Limited', 'F87AEF': 'Rosonix Technology, Inc.', '2028BC': 'Visionscape Co,. 
Ltd.', '241C04': 'SHENZHEN JEHE TECHNOLOGY DEVELOPMENT CO., LTD.', 'D05C7A': 'Sartura d.o.o.', 'BC5C4C': 'ELECOM CO.,LTD.', '887033': 'Hangzhou Silan Microelectronic Inc', 'DC56E6': 'Shenzhen Bococom Technology Co.,LTD', 'CC9635': 'LVS Co.,Ltd.', '4480EB': 'Motorola Mobility LLC, a Lenovo Company', '402814': 'RFI Engineering', '144146': 'Honeywell (China) Co., LTD', '94F19E': 'HUIZHOU MAORONG INTELLIGENT TECHNOLOGY CO.,LTD', 'B856BD': 'ITT LLC', '40A5EF': 'Shenzhen Four Seas Global Link Network Technology Co., Ltd.', 'C4924C': 'KEISOKUKI CENTER CO.,LTD.', '749CE3': 'KodaCloud Canada, Inc', 'D45556': 'Fiber Mountain Inc.', '50502A': 'Egardia', '749637': 'Todaair Electronic Co., Ltd', '88E603': 'Avotek corporation', '2CAD13': 'SHENZHEN ZHILU TECHNOLOGY CO.,LTD', '78ACBF': 'Igneous Systems', 'C48E8F': 'Hon Hai Precision Ind. Co.,Ltd.', 'C4366C': 'LG Innotek', '54369B': '1Verge Internet Technology (Beijing) Co., Ltd.', '40D28A': 'Nintendo Co., Ltd.', '4062B6': 'Tele system communication', '60128B': 'CANON INC.', '84C3E8': 'Vaillant GmbH', '9CD35B': 'Samsung Electronics Co.,Ltd', 'A89FBA': 'Samsung Electronics Co.,Ltd', 'B88EC6': 'Stateless Networks', 'BCD165': 'Cisco SPVTG', '28E476': 'Pi-Coral', '4CA515': 'Baikal Electronics JSC', '7C3CB6': 'Shenzhen Homecare Technology Co.,Ltd.', '1C7D22': 'Fuji Xerox Co., Ltd.', '20C06D': 'SHENZHEN SPACETEK TECHNOLOGY CO.,LTD', '4886E8': 'Microsoft Corporation', '74A34A': 'ZIMI CORPORATION', 'A86405': 'nimbus 9, Inc', '30D587': 'Samsung Electronics Co.,Ltd', '6828F6': 'Vubiq Networks, Inc.', 'DC60A1': 'Teledyne DALSA Professional Imaging', '10A659': 'Mobile Create Co.,Ltd.', '58856E': 'QSC AG', '8C9109': 'Toyoshima Electric Technoeogy(Suzhou) Co.,Ltd.', '54098D': 'deister electronic GmbH', 'D05BA8': 'zte corporation', 'E4BAD9': '360 Fly Inc.', 'EC59E7': 'Microsoft Corporation', 'C035C5': 'Prosoft Systems LTD', 'A0A3E2': 'Actiontec Electronics, Inc', '74547D': 'Cisco SPVTG', '00E6E8': 'Netzin Technology Corporation,.Ltd.', '50F43C': 'Leeo Inc', 'E887A3': 'Loxley Public Company Limited', '10C67E': 'SHENZHEN JUCHIN TECHNOLOGY CO., LTD', '08A5C8': 'Sunnovo International Limited', 'F88479': 'Yaojin Technology(Shenzhen)Co.,Ltd', '2C2997': 'Microsoft Corporation', 'D46132': 'Pro Concept Manufacturer Co.,Ltd.', '54A050': 'ASUSTek COMPUTER INC.', '4C2C83': 'Zhejiang KaNong Network Technology Co.,Ltd.', '908C09': 'Total Phase', 'DC2F03': 'Step forward Group Co., Ltd.', '380E7B': 'V.P.S. 
Thai Co., Ltd', '2C3796': 'CYBO CO.,LTD.', '587FB7': 'SONAR INDUSTRIAL CO., LTD.', '08EB29': 'Jiangsu Huitong Group Co.,Ltd.', '409B0D': 'Shenzhen Yourf Kwan Industrial Co., Ltd', 'D8CB8A': 'Micro-Star INTL CO., LTD.', '70FC8C': 'OneAccess SA', '58108C': 'Intelbras', '4C7403': 'BQ', '8C5D60': 'UCI Corporation Co.,Ltd.', 'BC6B4D': 'Nokia', 'C8D019': 'Shanghai Tigercel Communication Technology Co.,Ltd', '902181': 'Shanghai Huaqin Telecom Technology Co.,Ltd', '849681': 'Cathay Communication Co.,Ltd', '2CA30E': 'POWER DRAGON DEVELOPMENT LIMITED', '8486F3': 'Greenvity Communications', '1C5216': 'DONGGUAN HELE ELECTRONICS CO., LTD', '6099D1': 'Vuzix / Lenovo', 'B04515': 'mira fitness,LLC.', '14488B': 'Shenzhen Doov Technology Co.,Ltd', '2012D5': 'Scientech Materials Corporation', '6C6EFE': 'Core Logic Inc.', '3808FD': 'Silca Spa', '784561': 'CyberTAN Technology Inc.', '94CE31': 'CTS Limited', '80F8EB': 'RayTight', 'D437D7': 'zte corporation', '2C010B': 'NASCENT Technology, LLC - RemKon', '04DEDB': 'Rockport Networks Inc', '1C4840': 'IMS Messsysteme GmbH', '6050C1': 'Kinetek Sports', '2C1A31': 'Electronics Company Limited', '90B686': 'Murata Manufacturing Co., Ltd.', 'ECD9D1': 'Shenzhen TG-NET Botone Technology Co.,Ltd.', '6C0273': 'Shenzhen Jin Yun Video Equipment Co., Ltd.', '54DF00': 'Ulterius Technologies, LLC', '20ED74': 'Ability enterprise co.,Ltd.', 'A056B2': 'Harman/Becker Automotive Systems GmbH', '44666E': 'IP-LINE', '244F1D': 'iRule LLC', '94AEE3': 'Belden Hirschmann Industries (Suzhou) Ltd.', '5CB6CC': 'NovaComm Technologies Inc.', '705B2E': 'M2Communication Inc.', '5C1515': 'ADVAN', 'D059E4': 'Samsung Electronics Co.,Ltd', '14A364': 'Samsung Electronics Co.,Ltd', 'B40AC6': 'DEXON Systems Ltd.', 'D866EE': 'BOXIN COMMUNICATION CO.,LTD.', '2829CC': 'Corsa Technology Incorporated', '184A6F': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '10C37B': 'ASUSTek COMPUTER INC.', 'E85D6B': 'Luminate Wireless', 'B40B44': 'Smartisan Technology Co., Ltd.', '3431C4': 'AVM GmbH', '50FEF2': 'Sify Technologies Ltd', '34C5D0': 'Hagleitner Hygiene International GmbH', 'A012DB': 'TABUCHI ELECTRIC CO.,LTD', '481A84': 'Pointer Telocation Ltd', 'DC663A': 'Apacer Technology Inc.', '08DF1F': 'Bose Corporation', '581F67': 'Open-m technology limited', '4826E8': 'Tek-Air Systems, Inc.', 'D05AF1': 'Shenzhen Pulier Tech CO.,Ltd', 'FCE186': 'A3M Co., LTD', '54EF92': 'Shenzhen Elink Technology Co., LTD', 'ECE512': 'tado GmbH', '30918F': 'Technicolor', '5CF4AB': 'Zyxel Communications Corporation', '08B2A3': 'Cynny Italia S.r.L.', '70305D': 'Ubiquoss Inc', 'D4CFF9': 'Shenzhen Sen5 Technology Co., Ltd.', '488244': 'Life Fitness / Div. of Brunswick', 'C83168': 'eZEX corporation', '40B3CD': 'Chiyoda Electronics Co.,Ltd.', '24336C': 'Private', 'B87CF2': 'Aerohive Networks Inc.', '202564': 'PEGATRON CORPORATION', 'C8D590': 'FLIGHT DATA SYSTEMS', '7CFF62': 'Huizhou Super Electron Technology Co.,Ltd.', '84B59C': 'Juniper Networks', 'C09D26': 'Topicon HK Lmd.', '442938': 'NietZsche enterprise Co.Ltd.', 'A0DA92': 'Nanjing Glarun Atten Technology Co. Ltd.', 'ECF72B': 'HD DIGITAL TECH CO., LTD.', '4CF45B': 'Blue Clover Devices', 'B06971': 'DEI Sales, Inc.', '1889DF': 'CerebrEX Inc.', '54C80F': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'E4D332': 'TP-LINK TECHNOLOGIES CO.,LTD.', '0C2026': 'noax Technologies AG', '283B96': 'Cool Control LTD', '648D9E': 'IVT Electronic Co.,Ltd', 'CC95D7': 'Vizio, Inc', 'FC09F6': 'GUANGDONG TONZE ELECTRIC CO.,LTD', 'BCF61C': 'Geomodeling Wuxi Technology Co. 
'Ltd.', 'FC923B': 'Nokia Corporation', 'E03F49': 'ASUSTek COMPUTER INC.', '88A73C': 'Ragentek Technology Group', '88D962': 'Canopus Systems US LLC', 'D09C30': 'Foster Electric Company, Limited', '949F3F': 'Optek Digital Technology company limited', 'E817FC': 'Fujitsu Cloud Technologies Limited', '78FEE2': 'Shanghai Diveo Technology Co., Ltd', '086DF2': 'Shenzhen MIMOWAVE Technology Co.,Ltd', '687848': 'Westunitis Co., Ltd.', '48D0CF': 'Universal Electronics, Inc.', '9C3EAA': 'EnvyLogic Co.,Ltd.', '1CFCBB': 'Realfiction ApS', 'F8F005': 'Newport Media Inc.', '28656B': 'Keystone Microtech Corporation', 'CC9F35': 'Transbit Sp. z o.o.', 'DCC793': 'Nokia Corporation', '444891': 'HDMI Licensing, LLC', '5C254C': 'Avire Global Pte Ltd', 'D42F23': 'Akenori PTE Ltd', '98C0EB': 'Global Regency Ltd', 'F03FF8': 'R L Drake', 'B0C554': 'D-Link International', '48EE86': 'UTStarcom (China) Co.,Ltd', '888914': 'All Components Incorporated', 'F0321A': 'Mita-Teknik A/S', 'A881F1': 'BMEYE B.V.', 'C4E984': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'D069D0': 'Verto Medical Solutions, LLC', 'FC1349': 'Global Apps Corp.', '848433': 'Paradox Engineering SA', '44C306': 'SIFROM Inc.', '602103': 'I4VINE, INC', 'FC07A0': 'LRE Medical GmbH', '1CEEE8': 'Ilshin Elecom', '9031CD': 'Onyx Healthcare Inc.', 'E48184': 'Nokia', '14C089': 'DUNE HD LTD', 'F4B6E5': 'TerraSem Co.,Ltd', '60FFDD': 'C.E. ELECTRONICS, INC', '24050F': 'MTN Electronic Co. Ltd', 'AC2DA3': 'TXTR GmbH', '889CA6': 'BTB Korea INC', '90837A': 'General Electric Water & Process Technologies', '80BAE6': 'Neets', 'F8A2B4': 'RHEWA-WAAGENFABRIK August Freudewald GmbH &Co. KG', 'E056F4': 'AxesNetwork Solutions inc.', '84569C': 'Coho Data, Inc.,', '78AE0C': 'Far South Networks', 'B024F3': 'Progeny Systems', '0C54A5': 'PEGATRON CORPORATION', '8C4DB9': 'Unmonday Ltd', '78CA5E': 'ELNO', '2C5A05': 'Nokia Corporation', 'D481CA': 'iDevices, LLC', 'D858D7': 'CZ.NIC, z.s.p.o.', 'B0D7C5': 'Logipix Ltd', 'A43A69': 'Vers Inc', '9C44A6': 'SwiftTest, Inc.', '4CD9C4': 'Magneti Marelli Automotive Electronics (Guangzhou) Co. 
Ltd', '50E0C7': 'TurControlSystme AG', 'B4827B': 'AKG Acoustics GmbH', 'DC5E36': 'Paterson Technology', 'DCAD9E': 'GreenPriz', 'B8DF6B': 'SpotCam Co., Ltd.', '9C8888': 'Simac Techniek NV', '10B26B': 'base Co.,Ltd.', '4CF02E': 'Vifa Denmark A/S', '288A1C': 'Juniper Networks', 'DCCEBC': 'Shenzhen JSR Technology Co.,Ltd.', '18C8E7': 'Shenzhen Hualistone Technology Co.,Ltd', 'A03B1B': 'Inspire Tech', '7CCD3C': 'Guangzhou Juzing Technology Co., Ltd', '9843DA': 'INTERTECH', '08CA45': 'Toyou Feiji Electronics Co., Ltd.', '3CCA87': 'Iders Incorporated', '7C6AB3': 'IBC TECHNOLOGIES INC.', '181BEB': 'Actiontec Electronics, Inc', '84A783': 'Alcatel Lucent', '041A04': 'WaveIP', '68764F': 'Sony Mobile Communications Inc', '34A5E1': 'Sensorist ApS', '8079AE': 'ShanDong Tecsunrise Co.,Ltd', '7CBD06': 'AE REFUsol', '94BA56': 'Shenzhen Coship Electronics Co., Ltd.', '38DBBB': 'Sunbow Telecom Co., Ltd.', '448A5B': "Micro-Star INT'L CO., LTD.", '6C4B7F': 'Vossloh-Schwabe Deutschland GmbH', '908C44': 'H.K ZONGMU TECHNOLOGY CO., LTD.', '688AB5': 'EDP Servicos', '2493CA': 'Voxtronic Austria', '0CCB8D': 'ASCO Numatics GmbH', '907990': 'Benchmark Electronics Romania SRL', '740EDB': 'Optowiz Co., Ltd', '6C8366': 'Nanjing SAC Power Grid Automation Co., Ltd.', 'F83D4E': 'Softlink Automation System Co., Ltd', '74D435': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '78D99F': 'NuCom HK Ltd.', '142BD2': 'Armtel Ltd.', '9CA10A': 'SCLE SFE', '88789C': 'Game Technologies SA', 'A4895B': 'ARK INFOSOLUTIONS PVT LTD', 'D09D0A': 'LINKCOM', 'EC219F': 'VidaBox LLC', 'A8CCC5': 'Saab AB (publ)', '58468F': 'Koncar Electronics and Informatics', '985D46': 'PeopleNet Communication', 'F89FB8': 'YAZAKI Energy System Corporation', 'F0F5AE': 'Adaptrum Inc.', 'FC3FAB': 'Henan Lanxin Technology Co., Ltd', '988E4A': 'NOXUS(BEIJING) TECHNOLOGY CO.,LTD', 'EC2AF0': 'Ypsomed AG', 'F854AF': 'ECI Telecom Ltd.', '54BEF7': 'PEGATRON CORPORATION', '50B888': 'wi2be Tecnologia S/A', '7C8306': 'Glen Dimplex Nordic as', '442AFF': 'E3 Technology, Inc.', '140D4F': 'Flextronics International', '446755': 'Orbit Irrigation', 'E0FAEC': 'Platan sp. z o.o. sp. 
k.', '7CE56B': 'ESEN Optoelectronics Technology Co.,Ltd.', 'D44C9C': 'Shenzhen YOOBAO Technology Co.Ltd', '20CEC4': 'Peraso Technologies', 'CC4703': 'Intercon Systems Co., Ltd.', 'ACCA8E': 'ODA Technologies', '088E4F': 'SF Software Solutions', '540536': 'Vivago Oy', '6C90B1': 'SanLogic Inc', 'CC7B35': 'zte corporation', '04D437': 'ZNV', 'CCF407': 'EUKREA ELECTROMATIQUE SARL', 'DC1792': 'Captivate Network', '28A241': 'exlar corp', '509871': 'Inventum Technologies Private Limited', '048C03': 'ThinPAD Technology (Shenzhen)CO.,LTD', '88462A': 'Telechips Inc.', 'C80258': 'ITW GSE ApS', '30786B': 'TIANJIN Golden Pentagon Electronics Co., Ltd.', '20DF3F': 'Nanjing SAC Power Grid Automation Co., Ltd.', 'F8516D': 'Denwa Technology Corp.', '444A65': 'Silverflare Ltd.', '744BE9': 'EXPLORER HYPERTECH CO.,LTD', 'FC6018': 'Zhejiang Kangtai Electric Co., Ltd.', 'F42012': 'Cuciniale GmbH', '98B039': 'Nokia', 'B830A8': 'Road-Track Telematics Development', '4CD637': 'Qsono Electronics Co., Ltd', '9436E0': 'Sichuan Bihong Broadcast & Television New Technologies Co.,Ltd', '5422F8': 'zte corporation', '486E73': 'Pica8, Inc.', '6405BE': 'NEW LIGHT LED', '646EEA': 'Iskratel d.o.o.', 'D0737F': 'Mini-Circuits', 'E8BB3D': 'Sino Prime-Tech Limited', '28285D': 'Zyxel Communications Corporation', '0CF019': 'Malgn Technology Co., Ltd.', '949FB4': 'ChengDu JiaFaAnTai Technology Co.,Ltd', '406826': 'Thales UK Limited', 'F82BC8': 'Jiangsu Switter Co., Ltd', '60C397': '2Wire Inc', 'E07F88': 'EVIDENCE Network SIA', '1C7CC7': 'Coriant GmbH', '341B22': 'Grandbeing Technology Co., Ltd', '40560C': 'In Home Displays Ltd', '58E02C': 'Micro Technic A/S', '78B3CE': 'Elo touch solutions', '88142B': 'Protonic Holland', 'A4FCCE': 'Security Expert Ltd.', '1C08C1': 'Lg Innotek', '34A68C': 'Shine Profit Development Limited', '341A4C': 'SHENZHEN WEIBU ELECTRONICS CO.,LTD.', '0488E2': 'Beats Electronics LLC', 'A47ACF': 'VIBICOM COMMUNICATIONS INC.', 'BC261D': 'HONG KONG TECON TECHNOLOGY', 'E8CE06': 'SkyHawke Technologies, LLC.', 'C8F386': 'Shenzhen Xiaoniao Technology Co.,Ltd', 'E0DCA0': 'Siemens Industrial Automation Products Ltd Chengdu', '842F75': 'Innokas Group', 'CC3C3F': 'SA.S.S. Datentechnik AG', '2C69BA': 'RF Controls, LLC', 'D4BF7F': 'UPVEL', '2C72C3': 'Soundmatters', 'C44838': 'Satcom Direct, Inc.', 'C8DDC9': 'Lenovo Mobile Communication Technology Ltd.', '6C8686': 'Technonia', 'D4AC4E': 'BODi rS, LLC', '204C6D': 'Hugo Brennenstuhl Gmbh & Co. KG.', '40C4D6': 'ChongQing Camyu Technology Development Co.,Ltd.', 'A8294C': 'Precision Optical Transceivers, Inc.', '70C6AC': 'Bosch Automotive Aftermarket', '7C0507': 'PEGATRON CORPORATION', '880905': 'MTMCommunications', '30D46A': 'Autosales Incorporated', '282CB2': 'TP-LINK TECHNOLOGIES CO.,LTD.', '64E599': 'EFM Networks', '308999': 'Guangdong East Power Co.,', 'C89346': 'MXCHIP Company Limited', 'F4B381': 'WindowMaster A/S', '74F102': 'Beijing HCHCOM Technology Co., Ltd', 'A0861D': 'Chengdu Fuhuaxin Technology co.,Ltd', '508D6F': 'CHAHOO Limited', 'E8DE27': 'TP-LINK TECHNOLOGIES CO.,LTD.', '94ACCA': 'trivum technologies GmbH', '9C9726': 'Technicolor', '908260': 'IEEE 1904.1 Working Group', 'D4EE07': 'HIWIFI Co., Ltd.', 'FCAD0F': 'QTS NETWORKS', '984C04': 'Zhangzhou Keneng Electrical Equipment Co Ltd', 'A4E991': 'SISTEMAS AUDIOVISUALES ITELSIS S.L.', '3C86A8': 'Sangshin elecom .co,, LTD', '84F493': 'OMS spol. s.r.o.', 'BCD177': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'ACDBDA': 'Shenzhen Geniatech Inc, Ltd', 'D42751': 'Infopia Co., Ltd', '103DEA': 'HFC Technology (Beijing) Ltd. 
Co.', 'F05DC8': 'Duracell Powermat', 'CC5D57': 'Information System Research Institute,Inc.', '64C667': 'Barnes&Noble', 'F0F260': 'Mobitec AB', '044CEF': 'Fujian Sanao Technology Co.,Ltd', '4C804F': 'Armstrong Monitoring Corp', '7CD762': 'Freestyle Technology Pty Ltd', '901D27': 'zte corporation', '9C3178': 'Foshan Huadian Intelligent Communications Teachnologies Co.,Ltd', 'B0C95B': 'Beijing Symtech CO.,LTD', '4CCA53': 'Skyera, Inc.', '90FF79': 'Metro Ethernet Forum', 'B01408': 'LIGHTSPEED INTERNATIONAL CO.', '081DFB': 'Shanghai Mexon Communication Technology Co.,Ltd', '983F9F': 'China SSJ (Suzhou) Network Technology Inc.', 'B838CA': 'Kyokko Tsushin System CO.,LTD', 'C44EAC': 'Shenzhen Shiningworth Technology Co., Ltd.', 'A80180': 'IMAGO Technologies GmbH', '0C5521': 'Axiros GmbH', '68A40E': 'BSH Hausgeräte GmbH', 'F4C6D7': 'blackned GmbH', 'D4CA6E': 'u-blox AG', '5C43D2': 'HAZEMEYER', 'D809C3': 'Cercacor Labs', 'E0C2B7': 'Masimo Corporation', 'A01917': 'Bertel S.p.a.', '68B8D9': 'Act KDE, Inc.', '90CC24': 'Synaptics, Inc', '2CE871': 'Alert Metalguard ApS', 'F87B62': 'FASTWEL INTERNATIONAL CO., LTD. Taiwan Branch', '40270B': 'Mobileeco Co., Ltd', '74FE48': 'ADVANTECH CO., LTD.', '80B95C': 'ELFTECH Co., Ltd.', '38B5BD': 'E.G.O. Elektro-Ger', '20918A': 'PROFALUX', 'E4EEFD': 'MR&D Manufacturing', '105CBF': 'DuroByte Inc', 'A46E79': 'DFT System Co.Ltd', 'C88A83': 'Dongguan HuaHong Electronics Co.,Ltd', '8CC5E1': 'ShenZhen Konka Telecommunication Technology Co.,Ltd', '64A341': 'Wonderlan (Beijing) Technology Co., Ltd.', '7898FD': 'Q9 Networks Inc.', 'D063B4': 'SolidRun Ltd.', '9C541C': 'Shenzhen My-power Technology Co.,Ltd', '8C3330': 'EmFirst Co., Ltd.', '087BAA': 'SVYAZKOMPLEKTSERVICE, LLC', '24F2DD': 'Radiant Zemax LLC', '20B5C6': 'Mimosa Networks', 'E4E409': 'LEIFHEIT AG', 'B877C3': 'METER Group', '004D32': 'Andon Health Co.,Ltd.', 'FCA9B0': 'MIARTECH (SHANGHAI),INC.', '94DE80': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '2C441B': 'Spectrum Medical Limited', 'EC4993': 'Qihan Technology Co., Ltd ', 'B0ACFA': 'FUJITSU LIMITED', '8CE081': 'zte corporation', '5865E6': 'INFOMARK CO., LTD.', 'A44E2D': 'Adaptive Wireless Solutions, LLC', '0CCDFB': 'EDIC Systems Inc.', '9C8D1A': 'INTEG process group inc', '480362': 'DESAY ELECTRONICS(HUIZHOU)CO.,LTD', '18673F': 'Hanover Displays Limited', '7C0A50': 'J-MEX Inc.', '5011EB': 'SilverNet Ltd', '54DF63': 'Intrakey technologies GmbH', '40F2E9': 'IBM', '744D79': 'Arrive Systems Inc.', '9C0473': 'Tecmobile (International) Ltd. ', 'B4DFFA': 'Litemax Electronics Inc.', '681CA2': 'Rosewill Inc.', '604616': 'XIAMEN VANN INTELLIGENT CO., LTD', 'E45614': 'Suttle Apparatus', '3C83B5': 'Advance Vision Electronics Co. 
Ltd.', '28A192': 'GERP Solution', '106FEF': 'Ad-Sol Nissin Corp', '6C40C6': 'Nimbus Data, Inc.', '1048B1': 'Beijing Duokan Technology Limited', 'D493A0': 'Fidelix Oy', '08EBED': 'World Elite Technology Co.,LTD', 'DC9FA4': 'Nokia Corporation', '44C39B': 'OOO RUBEZH NPO', 'C44567': 'SAMBON PRECISON and ELECTRONICS', '48F8B3': 'Cisco-Linksys, LLC', 'D8D27C': 'JEMA ENERGY, SA', 'B01203': 'Dynamics Hong Kong Limited', '9886B1': 'Flyaudio corporation (China)', '7093F8': 'Space Monkey, Inc.', '28B3AB': 'Genmark Automation', 'C4E7BE': 'SCSpro Co.,Ltd', '58874C': 'LITE-ON CLEAN ENERGY TECHNOLOGY CORP.', '2891D0': 'Stage Tec Entwicklungsgesellschaft für professionelle Audiotechnik mbH', 'C0BD42': 'ZPA Smart Energy a.s.', 'FC52CE': 'Control iD', '5C4A26': 'Enguity Technology Corp', '60F2EF': 'VisionVera International Co., Ltd.', 'C03F2A': 'Biscotti, Inc.', '381C4A': 'SIMCom Wireless Solutions Co.,Ltd.', 'D8C691': 'Hichan Technology Corp.', 'E43FA2': 'Wuxi DSP Technologies Inc.', 'F4B72A': 'TIME INTERCONNECT LTD', '749975': 'IBM Corporation', '2C625A': 'Finest Security Systems Co., Ltd', '2074CF': 'Shenzhen Voxtech Co.,Ltd', 'E0F5CA': 'CHENG UEI PRECISION INDUSTRY CO.,LTD.', 'A8EF26': 'Tritonwave', '20DC93': 'Cheetah Hi-Tech, Inc.', '4423AA': 'Farmage Co., Ltd.', '7CFE28': 'Salutron Inc.', 'E8102E': 'Really Simple Software, Inc', '0C565C': 'HyBroad Vision (Hong Kong) Technology Co Ltd', '8C6AE4': 'Viogem Limited', '543968': 'Edgewater Networks Inc', '440CFD': 'NetMan Co., Ltd.', '8CD3A2': 'VisSim AS', 'D82DE1': 'Tricascade Inc.', '14358B': 'Mediabridge Products, LLC.', '00F403': 'Orbis Systems Oy', '547398': 'Toyo Electronics Corporation', 'E0A30F': 'Pevco', '88DC96': 'SENAO Networks, Inc.', '20443A': 'Schneider Electric Asia Pacific Ltd', 'C4393A': 'SMC Networks Inc', '5C2479': 'Baltech AG', 'EC9327': 'MEMMERT GmbH + Co. KG', 'A0EF84': "Seine Image Int'l Co., Ltd", '64517E': 'LONG BEN (DONGGUAN) ELECTRONIC TECHNOLOGY CO.,LTD.', 'D43D7E': "Micro-Star Int'l Co, Ltd", 'ACD9D6': 'tci GmbH', '48282F': 'zte corporation', '60CBFB': 'AirScape Inc.', '7C160D': 'Saia-Burgess Controls AG', 'A497BB': 'Hitachi Industrial Equipment Systems Co.,Ltd', '4C5427': 'Linepro Sp. z o.o.', '80D18B': "Hangzhou I'converge Technology Co.,Ltd", '4088E0': 'Beijing Ereneben Information Technology Limited Shenzhen Branch', 'E85484': 'NEO Information Systems Co., Ltd.', '74AE76': 'iNovo Broadband, Inc.', 'EC1A59': 'Belkin International Inc.', '881036': 'Panodic(ShenZhen) Electronics Limted', '68B6FC': 'Hitron Technologies. Inc', 'ECA29B': 'Kemppi Oy', '04CE14': 'Wilocity LTD.', 'C4BA99': 'I+ME Actia Informatik und Mikro-Elektronik GmbH', 'A4934C': 'Cisco Systems, Inc', 'D0D212': 'K2NET Co.,Ltd.', 'B0435D': 'NuLEDs, Inc.', '0808EA': 'AMSC', 'E8D483': 'ULTIMATE Europe Transportation Equipment GmbH', '1C8464': 'FORMOSA WIRELESS COMMUNICATION CORP.', '346E8A': 'Ecosense', '64F242': 'Gerdes Aktiengesellschaft', '60F281': 'TRANWO TECHNOLOGY CO., LTD.', '942197': 'Stalmart Technology Limited', 'A0C3DE': 'Triton Electronic Systems Ltd.', 'D0699E': 'LUMINEX Lighting Control Equipment', '0CC0C0': 'MAGNETI MARELLI SISTEMAS ELECTRONICOS MEXICO', '08379C': 'Topaz Co. LTD.', 'D80DE3': 'FXI TECHNOLOGIES AS', 'B0D2F5': 'Vello Systems, Inc.', '709A0B': 'Italian Institute of Technology', 'F0FDA0': 'Acurix Networks Pty Ltd', 'B45570': 'Borea', '100D2F': 'Online Security Pty. 
Ltd.', '142DF5': 'Amphitech', '5057A8': 'Cisco Systems, Inc', '00DEFB': 'Cisco Systems, Inc', '3CA315': 'Bless Information & Communications Co., Ltd', 'F83094': 'Alcatel-Lucent Telecom Limited', '10A932': 'Beijing Cyber Cloud Technology Co. ,Ltd.', '34FC6F': 'ALCEA', 'C0B357': 'Yoshiki Electronics Industry Ltd.', '3C98BF': 'Quest Controls, Inc.', 'D0AEEC': 'Alpha Networks Inc.', 'E81324': 'GuangZhou Bonsoninfo System CO.,LTD', 'E477D4': 'Minrray Industry Co.,Ltd ', '38E08E': 'Mitsubishi Electric Corporation', 'E4C806': 'Ceiec Electric Technology Inc.', 'E0F9BE': 'Cloudena Corp.', 'B88F14': 'Analytica GmbH', '7C94B2': 'Philips Healthcare PCCI', '442B03': 'Cisco Systems, Inc', 'F473CA': 'Conversion Sound Inc.', 'F8F7FF': 'SYN-TECH SYSTEMS INC', 'A81758': 'Elektronik System i Umeå AB', '882012': 'LMI Technologies', '60E956': 'Ayla Networks, Inc', 'EC1120': 'FloDesign Wind Turbine Corporation', 'F897CF': 'DAESHIN-INFORMATION TECHNOLOGY CO., LTD.', '08B4CF': 'Abicom International', 'C495A2': 'SHENZHEN WEIJIU INDUSTRY AND TRADE DEVELOPMENT CO., LTD', '8C6878': 'Nortek-AS', '202598': 'Teleview', '38F8B7': 'V2COM PARTICIPACOES S.A.', 'F8D462': 'Pumatronix Equipamentos Eletronicos Ltda.', 'A0DC04': 'Becker-Antriebe GmbH', '40605A': 'Hawkeye Tech Co. Ltd', 'A04CC1': 'Helixtech Corp.', '34A7BA': 'Fischer International Systems Corporation', '0463E0': 'Nome Oy', 'B49EE6': 'SHENZHEN TECHNOLOGY CO LTD', 'BC4B79': 'SensingTek', 'A49005': 'CHINA GREATWALL COMPUTER SHENZHEN CO.,LTD', 'C40ACB': 'Cisco Systems, Inc', 'E86D6E': 'voestalpine SIGNALING Fareham Ltd.', '681605': 'Systems And Electronic Development FZCO', 'D4A02A': 'Cisco Systems, Inc', '3C4E47': 'Etronic A/S', 'F48771': 'Infoblox', '5453ED': 'Sony Corporation', '00376D': 'Murata Manufacturing Co., Ltd.', '50008C': 'Hong Kong Telecommunications (HKT) Limited', '902B34': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '88C36E': 'Beijing Ereneben lnformation Technology Limited', '4C9E80': 'KYOKKO ELECTRIC Co., Ltd.', '5CEB4E': 'R. STAHL HMI Systems GmbH', '34AA99': 'Nokia', '645563': 'Intelight Inc.', '943AF0': 'Nokia Corporation', '645422': 'Equinox Payments', '080D84': 'GECO, Inc.', '88E712': 'Whirlpool Corporation', 'D412BB': 'Quadrant Components Inc. Ltd', '24B88C': 'Crenus Co.,Ltd.', 'BCFE8C': 'Altronic, LLC', '649EF3': 'Cisco Systems, Inc', '24BBC1': 'Absolute Analysis', '9CCAD9': 'Nokia Corporation', '046D42': 'Bryston Ltd.', 'D8E743': 'Wush, Inc', '644D70': 'dSPACE GmbH', 'DCC101': 'SOLiD Technologies, Inc.', '3CE624': 'LG Display ', 'D8F0F2': 'Zeebo Inc', '806007': 'RIM', '38A851': 'Moog, Ing', '94E0D0': 'HealthStream Taiwan Inc.', 'D8052E': 'Skyviia Corporation', '80946C': 'TOKYO RADAR CORPORATION', 'D0CF5E': 'Energy Micro AS', '1803FA': 'IBT Interfaces', '306E5C': 'Validus Technologies', 'C894D2': 'Jiangsu Datang Electronic Products Co., Ltd', 'C8A620': 'Nebula, Inc', 'EC6264': 'Global411 Internet Services, LLC', '00F051': 'KWB Gmbh', 'FC946C': 'UBIVELOX', '407B1B': 'Mettle Networks Inc.', '40D559': 'MICRO S.E.R.I.', '306CBE': 'Skymotion Technology (HK) Limited', '183825': 'Wuhan Lingjiu High-tech Co.,Ltd.', '7C4B78': 'Red Sun Synthesis Pte Ltd', '64A0E7': 'Cisco Systems, Inc', 'DCF858': 'Lorent Networks, Inc.', '940B2D': 'NetView Technologies(Shenzhen) Co., Ltd', '803F5D': 'Winstars Technology Ltd', '40BF17': 'Digistar Telecom. SA', '780738': 'Z.U.K. 
Elzab S.A.', '2037BC': 'Kuipers Electronic Engineering BV', '94319B': 'Alphatronics BV', '00E175': 'AK-Systems Ltd', 'CC501C': 'KVH Industries, Inc.', '04D783': 'Y&H E&C Co.,LTD.', '54A9D4': 'Minibar Systems', 'B0E50E': 'NRG SYSTEMS INC', '64808B': 'VG Controls, Inc.', '48C1AC': 'PLANTRONICS, INC.', '98588A': 'SYSGRATION Ltd.', '2437EF': 'EMC Electronic Media Communication SA', '28B0CC': 'Xenya d.o.o.', '205B5E': 'Shenzhen Wonhe Technology Co., Ltd', 'C058A7': 'Pico Systems Co., Ltd.', 'EC3F05': 'Institute 706, The Second Academy China Aerospace Science & Industry Corp', '489BE2': 'SCI Innovations Ltd', '80FFA8': 'UNIDIS', 'E435FB': 'Sabre Technology (Hull) Ltd', 'C83B45': 'JRI', 'E878A1': 'BEOVIEW INTERCOM DOO', 'CCEF48': 'Cisco Systems, Inc', 'F04B6A': 'Scientific Production Association Siberian Arsenal, Ltd.', '64AE0C': 'Cisco Systems, Inc', 'E8DA96': 'Zhuhai Tianrui Electrical Power Tech. Co., Ltd.', 'B4D8DE': 'iota Computing, Inc.', 'C8903E': 'Pakton Technologies', '54CDA7': 'Fujian Shenzhou Electronic Co.,Ltd', '886B76': 'CHINA HOPEFUL GROUP HOPEFUL ELECTRIC CO.,LTD', '78F7D0': 'Silverbrook Research', '207600': 'Actiontec Electronics, Inc', 'F013C3': 'SHENZHEN FENDA TECHNOLOGY CO., LTD', '04A82A': 'Nokia Corporation', 'E44E18': 'Gardasoft VisionLimited', '2046A1': 'VECOW Co., Ltd', 'FC01CD': 'FUNDACION TEKNIKER', '9C8BF1': 'The Warehouse Limited', 'DC2E6A': 'HCT. Co., Ltd.', '148A70': 'ADS GmbH', '00B338': 'Kontron Asia Pacific Design Sdn. Bhd', '84248D': 'Zebra Technologies Inc', 'FCE892': 'Hangzhou Lancable Technology Co.,Ltd', '1071F9': 'Cloud Telecomputers, LLC', 'B8621F': 'Cisco Systems, Inc', 'F0022B': 'Chrontel', 'D453AF': 'VIGO System S.A.', '18AD4D': 'Polostar Technology Corporation', '94C6EB': 'NOVA electronics, Inc.', '843F4E': 'Tri-Tech Manufacturing, Inc.', 'C83232': 'Hunting Innova', '549478': 'Silvershore Technology Partners', 'A06E50': 'Nanotek Elektronik Sistemler Ltd. Sti.', '4C774F': 'Embedded Wireless Labs ', 'D0C282': 'Cisco Systems, Inc', '147DB3': 'JOA TELECOM.CO.,LTD', 'ECBD09': 'FUSION Electronics Ltd', '944696': 'BaudTec Corporation', '54847B': 'Digital Devices GmbH', '3C2763': 'SLE quality engineering GmbH & Co. KG', 'B0F1BC': 'Dhemax Ingenieros Ltda', 'B8288B': 'Parker Hannifin Manufacturing (UK) Ltd', '90D11B': 'Palomar Medical Technologies', '34A55D': 'TECHNOSOFT INTERNATIONAL SRL', '802E14': 'azeti Networks AG', 'D4C1FC': 'Nokia Corporation', '34BCA6': 'Beijing Ding Qing Technology, Ltd.', '5835D9': 'Cisco Systems, Inc', '64D912': 'Solidica, Inc.', 'C47B2F': 'Beijing JoinHope Image Technology Ltd.', '508ACB': 'SHENZHEN MAXMADE TECHNOLOGY CO., LTD.', '3CD16E': 'Telepower Communication Co., Ltd', 'FC2E2D': 'Lorom Industrial Co.LTD.', '40040C': 'A&T', 'DC3C84': 'Ticom Geomatics, Inc.', 'D0131E': 'Sunrex Technology Corp', '00FC70': 'Intrepid Control Systems, Inc.', '703AD8': 'Shenzhen Afoundry Electronic Co., Ltd', '704AAE': 'Xstream Flow (Pty) Ltd', '40B3FC': 'Logital Co. 
Limited ', 'A4134E': 'Luxul ', 'B09928': 'FUJITSU LIMITED', '04E1C8': 'IMS Soluções em Energia Ltda.', '948FEE': 'Verizon Telematics', '50D6D7': 'Takahata Precision', '88F077': 'Cisco Systems, Inc', '587521': 'CJSC RTSoft', 'C40F09': 'Hermes electronic GmbH', '48F47D': 'TechVision Holding Internation Limited', 'F081AF': 'IRZ AUTOMATION TECHNOLOGIES LTD', '701404': 'Limited Liability Company', 'B435F7': 'Zhejiang Pearmain Electronics Co.ltd.', '9866EA': 'Industrial Control Communications, Inc.', '983000': 'Beijing KEMACOM Technologies Co., Ltd.', '90CF15': 'Nokia Corporation', '948B03': 'EAGET Innovation and Technology Co., Ltd.', '2C0033': 'EControls, LLC', 'AC199F': 'SUNGROW POWER SUPPLY CO.,LTD.', '7C4A82': 'Portsmith LLC', '94E848': 'FYLDE MICRO LTD', 'AC5E8C': 'Utillink', '18E288': 'STT Condigi', '1C35F1': 'NEW Lift Neue Elektronische Wege Steuerungsbau GmbH', '803457': 'OT Systems Limited', '5C0CBB': 'CELIZION Inc.', 'C4242E': 'Galvanic Applied Sciences Inc', '24C86E': 'Chaney Instrument Co.', 'F0AE51': 'Xi3 Corp', 'B80B9D': 'ROPEX Industrie-Elektronik GmbH', '306118': 'Paradom Inc.', '4C7367': 'Genius Bytes Software Solutions GmbH', '90EA60': 'SPI Lasers Ltd ', '5070E5': 'He Shan World Fair Electronics Technology Limited', '802275': 'Beijing Beny Wave Technology Co Ltd', 'CCF3A5': 'Chi Mei Communication Systems, Inc', '14A9E3': 'MST CORPORATION', 'F8EA0A': 'Dipl.-Math. Michael Rauch', '3831AC': 'WEG', '584C19': 'Chongqing Guohong Technology Development Company Limited', '6469BC': 'Hytera Communications Co .,ltd', 'B4F323': 'PETATEL INC.', '285132': 'Shenzhen Prayfly Technology Co.,Ltd', 'E42FF6': 'Unicore communication Inc.', '84D9C8': 'Unipattern Co.,', '94AAB8': 'Joview(Beijing) Technology Co. Ltd.', '28F358': '2C - Trifonov & Co', '14C21D': 'Sabtech Industries', 'C88439': 'Sunrise Technologies', 'F0BF97': 'Sony Corporation', 'C44AD0': 'FIREFLIES SYSTEMS', 'EC7D9D': 'CPI', 'C81E8E': 'ADV Security (S) Pte Ltd', 'A88792': 'Broadband Antenna Tracking Systems', '14F0C5': 'Xtremio Ltd.', 'E8C229': 'H-Displays (MSC) Bhd', '3CA72B': 'MRV Communications (Networks) LTD', '301A28': 'Mako Networks Ltd', '04E662': 'Acroname Inc.', 'F87B8C': 'Amped Wireless', '283410': 'Enigma Diagnostics Limited', '0CE82F': 'Bonfiglioli Vectron GmbH', '40F4EC': 'Cisco Systems, Inc', '14B73D': 'ARCHEAN Technologies', '948D50': 'Beamex Oy Ab', 'A433D1': 'Fibrlink Communications Co.,Ltd.', '5CBD9E': 'HONGKONG MIRACLE EAGLE TECHNOLOGY(GROUP) LIMITED', '08E672': 'JEBSEE ELECTRONICS CO.,LTD.', 'B8E589': 'Payter BV', '88E0A0': 'Shenzhen VisionSTOR Technologies Co., Ltd', 'FC10BD': 'Control Sistematizado S.A.', 'F0C27C': 'Mianyang Netop Telecom Equipment Co.,Ltd.', '241A8C': 'Squarehead Technology AS', 'D44F80': 'Kemper Digital GmbH', 'A41BC0': 'Fastec Imaging Corporation', '205B2A': 'Private', 'F40321': 'BeNeXt B.V.', 'A071A9': 'Nokia Corporation', 'A4E32E': 'Silicon & Software Systems Ltd.', 'C8C126': 'ZPM Industria e Comercio Ltda', '64DE1C': 'Kingnetic Pte Ltd', 'A862A2': 'JIWUMEDIA CO., LTD.', '984E97': 'Starlight Marketing (H. K.) Ltd.', '64DC01': 'Static Systems Group PLC', 'FC1FC0': 'EURECAM', 'BC6784': 'Environics Oy', '68DCE8': 'PacketStorm Communications', '488E42': 'DIGALOG GmbH', '607688': 'Velodyne', '78CD8E': 'SMC Networks Inc', '2C8065': 'HARTING Inc. of North America', '3CC0C6': 'd&b audiotechnik GmbH', '4468AB': 'JUIN COMPANY, LIMITED', 'F81037': 'Atopia Systems, LP', '78A683': 'Precidata', 'F02572': 'Cisco Systems, Inc', '04FF51': 'NOVAMEDIA INNOVISION SP. 
Z O.O.', '4CB4EA': 'HRD (S) PTE., LTD.', 'D44C24': 'Vuppalamritha Magnetic Components LTD', 'F8C678': 'Carefusion', '6CAB4D': 'Digital Payment Technologies', '2CB0DF': 'Soliton Technologies Pvt Ltd', 'ECE555': 'Hirschmann Automation', '58F98E': 'SECUDOS GmbH', 'B4C44E': 'VXL eTech Pvt Ltd', '707EDE': 'NASTEC LTD.', 'C07E40': 'SHENZHEN XDK COMMUNICATION EQUIPMENT CO.,LTD', 'E44F29': 'MA Lighting Technology GmbH', 'B4749F': 'ASKEY COMPUTER CORP', '7C4AA8': 'MindTree Wireless PVT Ltd', '8091C0': 'AgileMesh, Inc.', '084EBF': 'Broad Net Mux Corporation', 'E05FB9': 'Cisco Systems, Inc', 'E0143E': 'Modoosis Inc.', '90D852': 'Comtec Co., Ltd.', '380197': 'TSST Global,Inc', 'AC02CF': 'RW Tecnologia Industria e Comercio Ltda', 'D41296': 'Anobit Technologies Ltd.', '48174C': 'MicroPower technologies', '349A0D': 'ZBD Displays Ltd', '90507B': 'Advanced PANMOBIL Systems GmbH & Co. KG', '0876FF': 'Thomson Telecom Belgium', '1C7C11': 'EID ', '20AA25': 'IP-NET LLC', 'C4EEF5': 'II-VI Incorporated', 'E0CF2D': 'Gemintek Corporation', 'D491AF': 'Electroacustica General Iberica, S.A.', 'C4B512': 'General Electric Digital Energy', '0034F1': 'Radicom Research, Inc.', '9433DD': 'Taco Inc', 'E02538': 'Titan Pet Products', 'CC7A30': 'CMAX Wireless Co., Ltd.', 'B88E3A': 'Infinite Technologies JLT', '588D09': 'Cisco Systems, Inc', 'C0C1C0': 'Cisco-Linksys, LLC', '6015C7': 'IdaTech', 'DC2008': 'ASD Electronics Ltd ', '1C83B0': 'Linked IP GmbH', 'A4D1D1': 'ECOtality North America', 'C49313': '100fio networks technology llc', '7C3920': 'SSOMA SECURITY', '28C0DA': 'Juniper Networks', '9C77AA': 'NADASNV', '10E8EE': 'PhaseSpace', 'A47C1F': 'Cobham plc', 'D46CDA': 'CSM GmbH', '5CD998': 'D-Link Corporation', '68597F': 'Alcatel Lucent', 'F065DD': 'Primax Electronics Ltd.', '706582': 'Suzhou Hanming Technologies Co., Ltd.', '34D2C4': 'RENA GmbH Print Systeme', 'D4CBAF': 'Nokia Corporation', '045D56': 'camtron industrial inc.', '68234B': 'Nihon Dengyo Kousaku', '1C3DE7': 'Sigma Koki Co.,Ltd.', '58BC27': 'Cisco Systems, Inc', '20D607': 'Nokia Corporation', '6CE0B0': 'SOUND4', '9CFFBE': 'OTSL Inc.', '00F860': 'PT. Panggung Electric Citrabuana', 'B8BA72': 'Cynove', '443D21': 'Nuvolt', '30493B': 'Nanjing Z-Com Wireless Co.,Ltd', 'A45C27': 'Nintendo Co., Ltd.', '6C0460': 'RBH Access Technologies Inc.', '706417': 'ORBIS TECNOLOGIA ELECTRICA S.A.', '18EF63': 'Cisco Systems, Inc', '206FEC': 'Braemac CA LLC', '100D32': 'Embedian, Inc.', '88ACC1': 'Generiton Co., Ltd. 
', '8818AE': 'Tamron Co., Ltd', '7CED8D': 'Microsoft', 'A4BE61': 'EutroVision System, Inc.', 'D07DE5': 'Forward Pay Systems, Inc.', '04DD4C': 'Velocytech', 'A40CC3': 'Cisco Systems, Inc', '4CBAA3': 'Bison Electronics Inc.', 'A8B1D4': 'Cisco Systems, Inc', 'EC7C74': 'Justone Technologies Co., Ltd.', 'CCFCB1': 'Wireless Technology, Inc.', '3C1A79': 'Huayuan Technology CO.,LTD', '9CF61A': 'UTC Fire and Security', '7CF098': 'Bee Beans Technologies, Inc.', 'EC66D1': 'B&W Group LTD', '385FC3': 'Yu Jeong System, Co.Ltd', '888B5D': 'Storage Appliance Corporation ', '78C6BB': 'Innovasic, Inc.', '84A991': 'Cyber Trans Japan Co.,Ltd.', '68784C': 'Nortel Networks', 'F8D756': 'Simm Tronic Limited ', '04A3F3': 'Emicon', '1C17D3': 'Cisco Systems, Inc', '7CE044': 'NEON Inc', '284C53': 'Intune Networks', '64D02D': 'Next Generation Integration (NGI)', '90513F': 'Elettronica Santerno SpA', '8841C1': 'ORBISAT DA AMAZONIA IND E AEROL SA', '9C7514': 'Wildix srl', '4CF737': 'SamJi Electronics Co., Ltd', 'F0D767': 'Axema Passagekontroll AB', 'C84C75': 'Cisco Systems, Inc', 'C802A6': 'Beijing Newmine Technology', '4C8B55': 'Grupo Digicon', '6C5CDE': 'SunReports, Inc.', '34F39B': 'WizLAN Ltd.', 'E86CDA': 'Supercomputers and Neurocomputers Research Center', '240B2A': 'Viettel Group', '00B5D6': 'Omnibit Inc.', '548922': 'Zelfy Inc', '50C58D': 'Juniper Networks', '24A42C': 'KOUKAAM a.s.', '4C3089': 'Thales Transportation Systems GmbH', '481249': 'Luxcom Technologies Inc.', '24A937': 'PURE Storage', '348302': 'iFORCOM Co., Ltd', 'B43DB2': 'Degreane Horizon', '84F64C': 'Cross Point BV', 'C08B6F': 'S I Sistemas Inteligentes Eletrônicos Ltda', 'F86ECF': 'Arcx Inc', '8C8401': 'Private', '408493': 'Clavister AB', '78A6BD': 'DAEYEON Control&Instrument Co,.Ltd', '3C1915': 'GFI Chrono Time', 'ECB106': 'Acuro Networks, Inc', 'C835B8': 'Ericsson, EAB/RWI/K', 'F89D0D': 'Control Technology Inc.', '2C3F3E': 'Alge-Timing GmbH', '089F97': 'LEROY AUTOMATION', '34BA51': 'Se-Kure Controls, Inc.', '6C7039': 'Novar GmbH', '982D56': 'Resolution Audio', '147373': 'TUBITAK UEKAE', 'FCCF62': 'IBM Corp', '084E1C': 'H2A Systems, LLC', '88B627': 'Gembird Europe BV', 'F06853': 'Integrated Corporation', 'A4ADB8': 'Vitec Group, Camera Dynamics Ltd', 'A4B121': 'Arantia 2010 S.L.', 'E02636': 'Nortel Networks', '5C57C8': 'Nokia Corporation', 'D46CBF': 'Goodrich ISR', 'E02630': 'Intrigue Technologies, Inc.', 'ECC882': 'Cisco Systems, Inc', '6CFDB9': 'Proware Technologies Co Ltd.', '10189E': 'Elmo Motion Control', '8C56C5': 'Nintendo Co., Ltd.', 'CCB888': 'AnB Securite s.a.', '6C5E7A': 'Ubiquitous Internet Telecom Co., Ltd', 'B42CBE': 'Direct Payment Solutions Limited', 'CC2218': 'InnoDigital Co., Ltd.', 'C86C1E': 'Display Systems Ltd', 'A01859': 'Shenzhen Yidashi Electronics Co Ltd', 'E8056D': 'Nortel Networks', 'C45976': 'Fugoo Coorporation', '90A7C1': 'Pakedge Device and Software Inc.', '80BAAC': 'TeleAdapt Ltd', '502DF4': 'Phytec Messtechnik GmbH', '2CCD27': 'Precor Inc', '104369': 'Soundmax Electronic Limited ', 'C06C0F': 'Dobbs Stanford', 'C86CB6': 'Optcom Co., Ltd.', '5849BA': 'Chitai Electronic Corp.', '00D11C': 'ACETEL', '601D0F': 'Midnite Solar', 'A8F94B': 'Eltex Enterprise Ltd.', '0C8230': 'SHENZHEN MAGNUS TECHNOLOGIES CO.,LTD', '746B82': 'MOVEK ', '9CC077': 'PrintCounts, LLC', '3CB17F': 'Wattwatchers Pty Ld', 'CC5459': 'OnTime Networks AS', 'F8DC7A': 'Variscite LTD', 'D4F143': 'IPROAD.,Inc', 'B8F732': 'Aryaka Networks Inc', 'E8DFF2': 'PRF Co., Ltd.', 'B4ED54': 'Wohler Technologies', '006440': 'Cisco Systems, Inc', '94C4E9': 'PowerLayer 
Microsystems HongKong Limited', '8843E1': 'Cisco Systems, Inc', '34862A': 'Heinz Lackmann GmbH & Co KG', 'ACE348': 'MadgeTech, Inc', '549A16': 'Uzushio Electric Co.,Ltd.', '9018AE': 'Shanghai Meridian Technologies, Co. Ltd.', '0494A1': 'CATCH THE WIND INC', '003A99': 'Cisco Systems, Inc', '003A9A': 'Cisco Systems, Inc', '003A98': 'Cisco Systems, Inc', '50A6E3': 'David Clark Company', '50934F': 'Gradual Tecnologia Ltda.', 'B8B1C7': 'BT&COM CO.,LTD', 'DC2C26': 'Iton Technology Limited', 'D411D6': 'ShotSpotter, Inc.', '9CAFCA': 'Cisco Systems, Inc', '1C0FCF': 'Sypro Optics GmbH', '9C4E8E': 'ALT Systems Ltd', '7072CF': 'EdgeCore Networks', 'ECD00E': 'MiraeRecognition Co., Ltd.', 'A4AD00': 'Ragsdale Technology', '4C9EE4': 'Hanyang Navicom Co.,Ltd.', 'C87248': 'Aplicom Oy', 'C47D4F': 'Cisco Systems, Inc', '3CDF1E': 'Cisco Systems, Inc', '986DC8': 'TOSHIBA MITSUBISHI-ELECTRIC INDUSTRIAL SYSTEMS CORPORATION', '6CAC60': 'Venetex Corp', 'F04BF2': 'JTECH Communications, Inc.', '042BBB': 'PicoCELA, Inc.', 'FC0877': 'Prentke Romich Company', '64F970': 'Kenade Electronics Technology Co.,LTD.', 'C82E94': 'Halfa Enterprise Co., Ltd.', '80177D': 'Nortel Networks', '7C7BE4': "Z'SEDAI KENKYUSHO CORPORATION", 'F0DE71': 'Shanghai EDO Technologies Co.,Ltd.', '60D30A': 'Quatius Limited', '24CF21': 'Shenzhen State Micro Technology Co., Ltd', '10BAA5': 'GANA I&C CO., LTD', 'BC9DA5': 'DASCOM Europe GmbH', '28FBD3': 'Ragentek Technology Group', '586ED6': 'Private', 'EC3091': 'Cisco Systems, Inc', '64BC11': 'CombiQ AB', 'C8C13C': 'RuggedTek Hangzhou Co., Ltd', 'F4ACC1': 'Cisco Systems, Inc', '4097D1': 'BK Electronics cc', '0CE936': 'ELIMOS srl', 'A02EF3': 'United Integrated Services Co., Led.', 'A09805': 'OpenVox Communication Co Ltd', '60391F': 'ABB Ltd', 'E8A4C1': 'Deep Sea Electronics Ltd', 'C8D2C1': 'Jetlun (Shenzhen) Corporation', 'E09153': 'XAVi Technologies Corp.', '88A5BD': 'QPCOM INC.', 'D4C766': 'Acentic GmbH', '002712': 'MaxVision LLC', '00271F': 'MIPRO Electronics Co., Ltd', '00270C': 'Cisco Systems, Inc', '0026CF': 'DEKA R&D', '0026E7': 'Shanghai ONLAN Communication Tech. Co., Ltd.', '0026E0': 'ASITEQ', '002703': 'Testech Electronics Pte Ltd', '0026F3': 'SMC Networks', '0026A5': 'MICROROBOT.CO.,LTD', '0026A3': 'FQ Ingenieria Electronica S.A.', '00269D': 'M2Mnet Co., Ltd.', '002697': 'Alpha Technologies Inc.', '00268A': 'Terrier SC Ltd', '002689': 'General Dynamics Robotic Systems', '0026C5': 'Guangdong Gosun Telecommunications Co.,Ltd', '0026C4': 'Cadmos microsystems S.r.l.', '0026C8': 'System Sensor', '00267A': 'wuhan hongxin telecommunication technologies co.,ltd', '0026C2': 'SCDI Co. LTD', '002685': 'Digital Innovation', '0026A9': 'Strong Technologies Pty Ltd', '002672': 'AAMP of America', '0025FD': 'OBR Centrum Techniki Morskiej S.A.', '002600': 'TEAC Australia Pty Ltd.', '0025FF': 'CreNova Multimedia Co., Ltd', '002604': 'Audio Processing Technology Ltd', '002659': 'Nintendo Co., Ltd.', '002651': 'Cisco Systems, Inc', '002612': 'Space Exploration Technologies', '002616': 'Rosemount Inc.', '00260B': 'Cisco Systems, Inc', '002623': 'JRD Communication Inc', '002627': 'Truesell', '00264E': 'Rail & Road Protec GmbH', '00264F': 'Krüger &Gothe GmbH', '002621': 'InteliCloud Technology Inc.', '00261C': 'NEOVIA INC.', '002664': 'Core System Japan', '002639': 'T.M. Electronics, Inc.', '0025C6': 'kasercorp, ltd', '0025C5': 'Star Link Communication Pvt. 
Ltd.', '0025C7': 'altek Corporation', '0025E6': 'Belgian Monitoring Systems bvba', '0025E0': 'CeedTec Sdn Bhd', '0025DE': 'Probits Co., LTD.', '0025B0': 'Schmartz Inc', '0025AD': 'Manufacturing Resources International', '0025AC': 'I-Tech corporation', '0025AB': 'AIO LCD PC BU / TPV', '0025EC': 'Humanware', '0025ED': 'NuVo Technologies LLC', '0025E9': 'i-mate Development, Inc.', '0025BA': 'Alcatel-Lucent IPD', '0025BB': 'INNERINT Co., Ltd.', '0025B8': 'Agile Communications, Inc.', '0025B1': 'Maya-Creation Corporation', '0025A1': 'Enalasys', '0025F3': 'Nordwestdeutsche Zählerrevision', '0025DD': 'SUNNYTEK INFORMATION CO., LTD.', '0025CE': 'InnerSpace', '002549': 'Jeorich Tech. Co.,Ltd.', '002539': 'IfTA GmbH', '002537': 'Runcom Technologies Ltd.', '002538': 'Samsung Electronics Co., Ltd., Memory Division', '002544': 'LoJack Corporation', '002532': 'Digital Recorders', '00255D': 'Morningstar Corporation', '002558': 'MPEDIA', '00254A': 'RingCube Technologies, Inc.', '00254F': 'ELETTROLAB Srl', '002583': 'Cisco Systems, Inc', '002591': 'NEXTEK, Inc.', '00258D': 'Haier', '002571': 'Zhejiang Tianle Digital Electric Co.,Ltd', '00259A': 'CEStronics GmbH', '002502': 'NaturalPoint', '0024F8': 'Technical Solutions Company Ltd.', '0024F9': 'Cisco Systems, Inc', '0024F2': 'Uniphone Telecommunication Co., Ltd.', '002514': "PC Worth Int'l Co., Ltd.", '00250B': 'CENTROFACTOR INC', '002506': 'A.I. ANTITACCHEGGIO ITALIA SRL', '00251C': 'EDT', '00251A': 'Psiber Data Systems Inc.', '0024E1': 'Convey Computer Corp.', '0024EE': 'Wynmax Inc.', '0024E3': 'CAO Group', '002507': 'ASTAK Inc.', '0024D9': 'BICOM, Inc.', '00248E': 'Infoware ZRt.', '002494': 'Shenzhen Baoxin Tech CO., Ltd.', '002489': 'Vodafone Omnitel N.V.', '00246F': 'Onda Communication spa', '002469': 'Smart Doorphones', '00247F': 'Nortel Networks', '002475': 'Compass System(Embedded Dept.)', '0024C3': 'Cisco Systems, Inc', '0024B5': 'Nortel Networks', '0024B0': 'ESAB AB', '0024C4': 'Cisco Systems, Inc', '00249D': 'NES Technology Inc.', '002464': 'Bridge Technologies Co AS', '002462': 'Rayzone Corporation', '002420': 'NetUP Inc.', '00241E': 'Nintendo Co., Ltd.', '00241F': 'DCT-Delta GmbH', '00240E': 'Inventec Besta Co., Ltd.', '002460': 'Giaval Science Development Co. Ltd.', '00245C': 'Design-Com Technologies Pty. Ltd.', '00243D': 'Emerson Appliance Motors and Controls', '002437': 'Motorola - BSG', '002444': 'Nintendo Co., Ltd.', '002413': 'Cisco Systems, Inc', '00244F': 'Asantron Technologies Ltd.', '0023DD': 'ELGIN S.A.', '0023DE': 'Ansync Inc.', '0023D9': 'Banner Engineering', '0023DA': 'Industrial Computer Source (Deutschland)GmbH', '002405': 'Dilog Nordic AB', '0023E8': 'Demco Corp.', '0023E3': 'Microtronic AG', '0023B7': 'Q-Light Co., Ltd.', '0023FD': 'AFT Atlas Fahrzeugtechnik GmbH', '0023EF': 'Zuend Systemtechnik AG', '0023AC': 'Cisco Systems, Inc', '0023D8': 'Ball-It Oy', '00239F': 'Institut für Prüftechnik', '00239D': 'Mapower Electronics Co., Ltd', '00239C': 'Juniper Networks', '002398': 'Vutlan sro', '00235B': 'Gulfstream', '002359': 'Benchmark Electronics ( Thailand ) Public Company Limited', '002357': 'Pitronot Technologies and Engineering P.T.E. 
Ltd.', '002355': 'Kinco Automation(Shanghai) Ltd.', '002373': 'GridIron Systems, Inc.', '002367': 'UniControls a.s.', '002368': 'Zebra Technologies Inc', '00236E': 'Burster GmbH & Co KG', '002366': 'Beijing Siasun Electronic System Co.,Ltd.', '00238F': 'NIDEC COPAL CORPORATION', '002380': 'Nanoteq', '00233D': 'Laird Technologies', '00233F': 'Purechoice Inc', '00231B': 'Danaher Motion - Kollmorgen', '00231E': 'Cezzer Multimedia Technologies', '00231F': 'Guangda Electronic & Telecommunication Technology Development Co., Ltd.', '002270': 'ABK North America, LLC', '002313': 'Qool Technologies Ltd.', '002310': 'LNC Technology Co., Ltd.', '0022CD': 'Ared Technology Co., Ltd.', '0022CC': 'SciLog, Inc.', '0022CB': 'IONODES Inc.', '0022C6': 'Sutus Inc', '0022E8': 'Applition Co., Ltd.', '0022E9': 'ProVision Communications', '0022E6': 'Intelligent Data', '0022E3': 'Amerigon', '0022EB': 'Data Respons A/S', '0022EF': 'iWDL Technologies', '0022F2': 'SunPower Corp', '0022E2': 'WABTEC Transit Division', '002301': 'Witron Technology Limited', '0022F7': 'Conceptronic', '00230C': 'CLOVER ELECTRONICS CO.,LTD.', '002334': 'Cisco Systems, Inc', '0022C8': 'Applied Instruments B.V.', '0022C0': 'Shenzhen Forcelink Electronic Co, Ltd', '0022A6': 'Sony Computer Entertainment America', '0022A7': 'Tyco Electronics AMP GmbH', '0022A1': 'Huawei Symantec Technologies Co.,Ltd.', '00229D': 'PYUNG-HWA IND.CO.,LTD', '002296': 'LinoWave Corporation', '002244': 'Chengdu Linkon Communications Device Co., Ltd', '002250': 'Point Six Wireless, LLC', '00226F': '3onedata Technology Co. Ltd.', '002278': 'Shenzhen Tongfang Multimedia Technology Co.,Ltd.', '00227A': 'Telecom Design', '002260': 'AFREEY Inc.', '0021EF': 'Kapsys', '0021ED': 'Telegesis', '0021EB': 'ESP SYSTEMS, LLC', '002237': 'Shinhint Group', '00222F': 'Open Grid Computing, Inc.', '0021F6': 'Oracle Corporation', '002206': 'Cyberdyne Inc.', '002202': 'Excito Elektronik i Skåne AB', '002227': 'uv-electronic GmbH', '00221E': 'Media Devices Co., Ltd.', '002225': 'Thales Avionics Ltd', '002220': 'Mitac Technology Corp', '00220E': 'Indigo Security Co., Ltd.', '002207': 'Inteno Broadband Technology AB', '00223E': 'IRTrans GmbH', '0021CE': 'NTC-Metrotek', '0021CA': 'ART System Co., Ltd.', '0021CB': 'SMS TECNOLOGIA ELETRONICA LTDA', '0021C8': 'LOHUIS Networks', '0021DB': 'Santachi Video Technology (Shenzhen) Co., Ltd.', '0021BF': 'Hitachi High-Tech Control Systems Corporation', '0021BC': 'ZALA COMPUTER', '0021B4': 'APRO MEDIA CO., LTD', '0021A8': 'Telephonics Corporation', '0021A9': 'Mobilink Telecom Co.,Ltd', '00218D': 'AP Router Ind. Eletronica LTDA', '002190': 'Goliath Solutions', '002185': "MICRO-STAR INT'L CO.,LTD.", '00219F': 'SATEL OY', '002196': 'Telsey S.p.A.', '002182': 'SandLinks Systems, Ltd.', '002183': 'ANDRITZ HYDRO GmbH', '0021DF': 'Martin Christ GmbH', '0021D4': 'Vollmer Werke GmbH', '0021D6': 'LXI Consortium', '0021A6': 'Videotec Spa', '002159': 'Juniper Networks', '002155': 'Cisco Systems, Inc', '002157': 'National Datacast, Inc.', '00213B': 'Berkshire Products, Inc', '002137': 'Bay Controls, LLC', '002139': 'Escherlogic Inc.', '00212C': 'SemIndia System Private Limited', '00212B': 'MSA Auer', '002174': 'AvaLAN Wireless', '002179': 'IOGEAR, Inc.', '002168': 'iVeia, LLC', '002150': 'EYEVIEW ELECTRONICS', '00214D': 'Guangzhou Skytone Transmission Technology Com. Ltd.', '00212A': 'Audiovox Corporation', '00215E': 'IBM Corp', '001FF6': 'PS Audio International', '002110': 'Clearbox Systems', '00210C': 'Cymtec Systems, Inc.', '00210B': 'GEMINI TRAZE RFID PVT. 
LTD.', '001FDD': 'GDI LLC', '001FDA': 'Nortel Networks', '002104': 'Gigaset Communications GmbH', '001FFB': 'Green Packet Bhd', '001FE9': 'Printrex, Inc.', '001FF0': 'Audio Partnership', '001FEA': 'Applied Media Technologies Corporation', '001FD9': 'RSD Communications Ltd', '001FCE': 'QTECH LLC', '001F8B': 'Cache IQ', '001F85': 'Apriva ISS, LLC', '001F87': 'Skydigital Inc.', '001F88': 'FMS Force Measuring Systems AG', '001F86': 'digEcor', '001FA1': 'Gtran Inc', '001F9F': 'Thomson Telecom Belgium', '001F99': 'SERONICS co.ltd', '001F80': 'Lucas Holding bv', '001B58': 'ACE CAD Enterprise Co., Ltd.', '001FB0': 'TimeIPS, Inc.', '001FAE': 'Blick South Africa (Pty) Ltd', '001F79': 'Lodam Electronics A/S', '001F71': 'xG Technology, Inc.', '001FA5': 'Blue-White Industries', '001F9D': 'Cisco Systems, Inc', '001F96': 'APROTECH CO.LTD', '001F40': 'Speakercraft Inc.', '001F6C': 'Cisco Systems, Inc', '001F6F': 'Fujian Sunnada Communication Co.,Ltd.', '001F60': 'COMPASS SYSTEMS CORP.', '001F6A': 'PacketFlux Technologies, Inc.', '001F65': 'KOREA ELECTRIC TERMINAL CO., LTD.', '001F17': 'IDX Company, Ltd.', '001F1B': 'RoyalTek Company Ltd.', '001F5E': 'Dyna Technology Co.,Ltd.', '001F55': 'Honeywell Security (China) Co., Ltd.', '001F54': 'Lorex Technology Inc.', '001F2E': "Triangle Research Int'l Pte Ltd", '001F4B': 'Lineage Power', '001F0D': 'L3 Communications - Telemetry West', '001EFC': 'JSC MASSA-K', '001F23': 'Interacoustics', '001F06': 'Integrated Dispatch Solutions', '001EBA': 'High Density Devices AS', '001EB8': 'Aloys, Inc', '001EB4': 'UNIFAT TECHNOLOGY LTD.', '001EE0': 'Urmet Domus SpA', '001EDA': 'Wesemann Elektrotechniek B.V.', '001ED7': 'H-Stream Wireless, Inc.', '001ED5': 'Tekon-Automatics', '001EE8': 'Mytek', '001EEE': 'ETL Systems Ltd', '001EFA': 'PROTEI Ltd.', '001EFB': 'Trio Motion Technology Ltd', '001EF8': 'Emfinity Inc.', '001ECB': 'RPC Energoautomatika Ltd', '001EA8': 'Datang Mobile Communications Equipment CO.,LTD', '001EAB': 'TeleWell Oy', '001E9F': 'Visioneering Systems, Inc.', '001E6B': 'Cisco SPVTG', '001E70': 'Cobham Antenna Systems', '001E61': 'ITEC GmbH', '001E3E': 'KMW Inc.', '001E38': 'Bluecard Software Technology Co., Ltd.', '001E47': 'PT. Hariff Daya Tunggal Engineering', '001E48': 'Wi-Links', '001E8A': 'eCopy, Inc', '001E9B': 'San-Eisha, Ltd.', '001E96': 'Sepura Plc', '001E59': 'Silicon Turnkey Express, LLC', '001E51': 'Converter Industry Srl', '001E71': 'MIrcom Group of Companies', '001DC4': 'AIOI Systems Co., Ltd.', '001DC0': 'Enphase Energy', '001DBD': 'Versamed Inc.', '001DF8': 'Webpro Vision Technology Corporation', '001DF9': 'Cybiotronics (Far East) Limited', '001DF7': 'R. 
STAHL Schaltgeräte GmbH', '001E05': 'Xseed Technologies & Computing', '001E07': 'Winy Technology Co., Ltd.', '001E0A': 'Syba Tech Limited', '001E03': 'LiComm Co., Ltd.', '001E1B': 'Digital Stream Technology, Inc.', '001E17': 'STN BV', '001E18': 'Radio Activity srl', '001E15': 'Beech Hill Electronics', '001E30': 'Shireen Inc', '001E2E': 'SIRTI S.p.A.', '001DDC': 'HangZhou DeChangLong Tech&Info Co.,Ltd', '001DEB': 'DINEC International', '001D9A': 'GODEX INTERNATIONAL CO., LTD', '001D97': 'Alertus Technologies LLC', '001D91': 'Digitize, Inc', '001D95': 'Flash, Inc.', '001D9D': 'ARTJOY INTERNATIONAL LIMITED', '001D9E': 'AXION TECHNOLOGIES', '001D70': 'Cisco Systems, Inc', '001D78': 'Invengo Information Technology Co.,Ltd', '001D6F': 'Chainzone Technology Co., Ltd', '001D7F': 'Tekron International Ltd', '001D79': 'SIGNAMAX LLC', '001DAE': 'CHANG TSENG TECHNOLOGY CO., LTD', '001DA6': 'Media Numerics Limited', '001D62': 'InPhase Technologies', '001D63': 'Miele & Cie. KG', '001DB7': 'Tendril Networks, Inc.', '001D8D': 'Fluke Process Instruments GmbH', '001D1F': 'Siauliu Tauro Televizoriai, JSC', '001D43': 'Shenzhen G-link Digital Technology Co., Ltd.', '001D3F': 'Mitron Pty Ltd', '001D39': 'MOOHADIGITAL CO., LTD', '001D37': 'Thales-Panda Transportation System', '001D13': 'NextGTV', '001D14': 'SPERADTONE INFORMATION TECHNOLOGY LIMITED', '001D10': 'LightHaus Logic, Inc.', '001D04': 'Zipit Wireless, Inc.', '001CF2': 'Tenlon Technology Co.,Ltd.', '001D30': 'YX Wireless S.A.', '001CB2': 'BPT SPA', '001CB5': 'Neihua Network Technology Co.,LTD.(NHN)', '001CB4': 'Iridium Satellite LLC', '001CB6': 'Duzon CNT Co., Ltd.', '001CC7': 'Rembrandt Technologies, LLC d/b/a REMSTREAM', '001CBB': 'MusicianLink', '001C8D': 'Mesa Imaging', '001C89': 'Force Communications, Inc.', '001C87': 'Uriver Inc.', '001CCD': 'Alektrona Corporation', '001CEC': 'Mobilesoft (Aust.) Pty Ltd', '001CE8': 'Cummins Inc', '001CD0': 'Circleone Co.,Ltd.', '001C9F': 'Razorstream, LLC', '001C7D': 'Excelpoint Manufacturing Pte Ltd', '001C5C': 'Integrated Medical Systems, Inc.', '001C52': 'VISIONEE SRL', '001C47': 'Hangzhou Hollysys Automation Co., Ltd', '001C16': 'ThyssenKrupp Elevator', '001C19': 'secunet Security Networks AG', '001C6C': '30805', '001C61': 'Galaxy Microsystems LImited', '001C3B': 'AmRoad Technology Inc.', '001C3F': 'International Police Technologies, Inc.', '001C28': 'Sphairon Technologies GmbH ', '001C1F': 'Quest Retail Technology Pty Ltd', '001C32': 'Telian Corporation', '001C2B': 'Alertme.com Limited', '001C77': 'Prodys', '001C6F': 'Emfit Ltd', '001C49': 'Zoltan Technology Inc.', '001C63': 'TRUEN', '001BDF': 'Iskra Sistemi d.d.', '001BD9': 'Edgewater Wireless Systems Inc', '001BC7': 'StarVedia Technology Inc.', '001BEC': 'Netio Technologies Co., Ltd', '001C09': 'SAE Electronic Co.,Ltd.', '001C0C': 'TANITA Corporation', '001BA6': 'intotech inc.', '001BA4': 'S.A.E Afikim', '001BB4': 'Airvod Limited', '001BB6': 'Bird Electronic Corp.', '001BE8': 'Ultratronik GmbH', '001BE1': 'ViaLogy', '001B93': 'JC Decaux SA DNT', '001B9B': 'Hose-McCann Communications', '001B9C': 'SATEL sp. 
z o.o.', '001B92': 'l-acoustics', '001B8E': 'Hulu Sweden AB', '001B45': 'ABB AS, Division Automation Products', '001B3F': 'ProCurve Networking by HP', '001B41': 'General Infinity Co.,Ltd.', '001B50': 'Nizhny Novgorod Factory named after M.Frunze, FSUE (NZiF)', '001B47': 'Futarque A/S', '001B6C': 'LookX Digital Media BV', '001B6B': 'Swyx Solutions AG', '001B69': 'Equaline Corporation', '001B76': 'Ripcode, Inc.', '001B70': 'IRI Ubiteq, INC.', '001B68': 'Modnnet Co., Ltd', '001B62': 'JHT Optoelectronics Co.,Ltd.', '001B8A': '2M Electronic A/S', '001B80': 'LORD Corporation', '001B3E': 'Curtis, Inc.', '001B37': 'Computec Oy', '001B07': 'Mendocino Software', '001B08': 'Danfoss Drives A/S', '001B01': 'Applied Radio Technologies', '001B02': 'ED Co.Ltd', '001AFC': 'ModusLink Corporation', '001B10': 'ShenZhen Kang Hui Technology Co.,ltd', '001B0B': 'Phidgets Inc.', '001B0C': 'Cisco Systems, Inc', '001AE0': 'Mythology Tech Express Inc.', '001AE2': 'Cisco Systems, Inc', '001AD7': 'Christie Digital Systems, Inc.', '001B23': 'SimpleComTools', '001AF6': 'Woven Systems, Inc.', '001AF9': 'AeroVIronment (AV Inc)', '001B30': 'Solitech Inc.', '001B18': 'Tsuken Electric Ind. Co.,Ltd', '001AE7': 'Aztek Networks, Inc.', '001A95': 'Hisense Mobile Communications Technoligy Co.,Ltd.', '001A81': 'Zelax', '001A87': 'Canhold International Limited', '001A88': 'Venergy,Co,Ltd', '001AC1': '3Com Ltd', '001ABB': 'Fontal Technology Incorporation', '001ABD': 'Impatica Inc.', '001AAE': 'Savant Systems LLC', '001ACD': 'Tidel Engineering LP', '001AC9': 'SUZUKEN CO.,LTD', '001A79': 'TELECOMUNICATION TECHNOLOGIES LTD.', '001AAA': 'Analogic Corp.', '001A8B': 'CHUNIL ELECTRIC IND., CO.', '001A8D': 'AVECS Bergen GmbH', '001AB4': 'FFEI Ltd.', '001AB5': 'Home Network System', '001AA4': 'Future University-Hakodate', '001A9F': 'A-Link Ltd', '001A74': 'Procare International Co', '001ABE': 'COMPUTER HI-TECH INC.', '001A19': 'Computer Engineering Limited', '001A18': 'Advanced Simulation Technology inc.', '001A58': 'CCV Deutschland GmbH - Celectronic eHealth Div.', '001A5E': 'Thincom Technology Co.,Ltd', '001A5C': 'Euchner GmbH+Co. KG', '001A5B': 'NetCare Service Co., Ltd.', '001A24': 'Galaxy Telecom Technologies Ltd', '001A20': 'CMOTECH Co. 
Ltd.', '001A4E': 'NTI AG / LinMot', '001A52': 'Meshlinx Wireless Inc.', '001A13': 'Wanlida Group Co., LTD', '001A0F': 'Sistemas Avanzados de Control, S.A.', '001A43': 'Logical Link Communications', '001A47': 'Agami Systems, Inc.', '001A2D': 'The Navvo Group', '001A2F': 'Cisco Systems, Inc', '0019A9': 'Cisco Systems, Inc', '0019AE': 'Hopling Technologies b.v.', '0019AF': 'Rigol Technologies, Inc.', '0019DE': 'MOBITEK', '0019E5': 'Lynx Studio Technology, Inc.', '0019DB': 'MICRO-STAR INTERNATIONAL CO., LTD.', '001A03': 'Angel Electronics Co., Ltd.', '0019F9': 'TDK-Lambda', '0019CE': 'Progressive Gaming International', '0019BD': 'New Media Life', '0019F2': 'Teradyne K.K.', '0019A7': 'ITU-T', '00199F': 'DKT A/S', '00198D': 'Ocean Optics, Inc.', '001982': 'SmarDTV', '001985': 'IT Watchdogs, Inc', '001951': 'NETCONS, s.r.o.', '001957': 'Saafnet Canada Inc.', '001958': 'Bluetooth SIG, Inc.', '001956': 'Cisco Systems, Inc', '00196B': 'Danpex Corporation', '00193D': 'GMC Guardian Mobility Corp.', '001986': 'Cheng Hongjian', '00199E': 'Nifty', '00196A': 'MikroM GmbH', '0018FF': 'PowerQuattro Co.', '0018F4': 'EO TECHNICS Co., Ltd.', '0018FC': 'Altec Electronic AG', '0018F6': 'Thomson Telecom Belgium', '0018F5': 'Shenzhen Streaming Video Technology Company Limited', '0018F9': 'VVOND, Inc.', '00193B': 'LigoWave', '001935': 'DUERR DENTAL AG', '001932': 'Gude Analog- und Digialsysteme GmbH', '001910': 'Knick Elektronische Messgeraete GmbH & Co. KG', '001913': 'Chuang-Yi Network Equipment Co.Ltd.', '0018FA': 'Yushin Precision Equipment Co.,Ltd.', '0018EA': 'Alltec GmbH', '0018E8': 'Hacetron Corporation', '001914': 'Winix Co., Ltd', '001906': 'Cisco Systems, Inc', '001901': 'F1MEDIA', '001931': 'Balluff GmbH', '0018E3': 'Visualgate Systems, Inc.', '00189F': 'Lenntek Corporation', '001899': 'ShenZhen jieshun Science&Technology Industry CO,LTD.', '00186D': 'Zhenjiang Sapphire Electronic Industry CO.', '00186F': 'Setha Industria Eletronica LTDA', '001875': 'AnaCise Testnology Pte Ltd', '0018C1': 'Almitec Informática e Comércio', '0018C4': 'Raba Technologies LLC', '0018C9': 'EOps Technology Limited', '0018D8': 'ARCH METER Corporation', '0018D9': 'Santosha Internatonal, Inc', '0018CF': 'Baldor Electric Company', '0018BC': 'ZAO NVP Bolid', '0018B7': 'D3 LED, LLC', '001895': 'Hansun Technologies Inc.', '001883': 'FORMOSA21 INC.', '00188E': 'Ekahau, Inc.', '001814': 'Mitutoyo Corporation', '001817': 'D. E. Shaw Research, LLC', '001811': 'Neuros Technology International, LLC.', '0017DE': 'Advantage Six Ltd', '0017D7': 'ION Geophysical Corporation Inc.', '001837': 'Universal ABIT Co., Ltd.', '001822': 'CEC TELECOM CO.,LTD.', '001820': 'w5networks', '00185D': 'TAIGUEN TECHNOLOGY (SHEN-ZHEN) CO., LTD.', '00185E': 'Nexterm Inc.', '001828': 'e2v technologies (UK) ltd.', '001835': 'Thoratec / ITC', '001801': 'Actiontec Electronics, Inc', '0017F3': 'Harris Corporation', '00184A': 'Catcher, Inc.', '00184B': 'Las Vegas Gaming, Inc.', '00180E': 'Avega Systems', '0017BC': 'Touchtunes Music Corporation', '0017C1': 'CM Precision Technology LTD.', '0017B2': 'SK Telesys', '0017B1': 'ACIST Medical Systems, Inc.', '0017A3': 'MIX s.r.l.', '0017A6': 'YOSIN ELECTRONICS CO., LTD.', '00179C': 'DEPRAG SCHULZ GMBH u. 
CO.', '001796': 'Rittmeyer AG', '0017E1': 'DACOS Technologies Co., Ltd.', '0017E0': 'Cisco Systems, Inc', '0017D2': 'THINLINX PTY LTD', '001785': 'Sparr Electronics Ltd', '001775': 'TTE Germany GmbH', '0017B8': 'NOVATRON CO., LTD.', '0017BB': 'Syrinx Industrial Electronics', '00177C': 'Smartlink Network Systems Limited', '001781': 'Greystone Data System, Inc.', '00178D': 'Checkpoint Systems, Inc.', '00178E': 'Gunnebo Cash Automation AB', '0017C7': 'MARA Systems Consulting AB', '00175D': 'Dongseo system.', '001750': 'GSI Group, MicroE Systems', '001755': 'GE Security', '00171D': 'DIGIT', '001718': 'Vansco Electronics Oy', '001719': 'Audiocodes USA, Inc', '001776': 'Meso Scale Diagnostics, LLC', '001779': 'QuickTel', '001767': 'Earforce AS', '001739': 'Bright Headphone Electronics Company', '00172C': 'TAEJIN INFOTECH', '001751': 'Online Corporation', '00174C': 'Millipore', '001711': 'GE Healthcare Bio-Sciences AB', '001745': 'INNOTZ CO., Ltd', '001748': 'Neokoros Brasil Ltda', '001763': 'Essentia S.p.A.', '001701': 'KDE, Inc.', '0016F6': 'Video Products Group', '0016EE': 'Royaldigital Inc.', '0016DE': 'FAST Inc', '0016DA': 'Futronic Technology Co. Ltd.', '0016D4': 'Compal Communications, Inc.', '0016D7': 'Sunways AG', '0016FF': 'Wamin Optocomm Mfg Corp', '0016D1': 'ZAT a.s.', '0016C5': 'Shenzhen Xing Feng Industry Co.,Ltd', '0016CC': 'Xcute Mobile Corp.', '001717': 'Leica Geosystems AG', '001715': 'Qstik', '00170E': 'Cisco Systems, Inc', '001705': 'Methode Electronics', '0016AA': 'Kei Communication Technology Inc.', '0016A8': 'CWT CO., LTD.', '0016A6': 'Dovado FZ-LLC', '0016ED': 'Utility, Inc', '0016C7': 'Cisco Systems, Inc', '001671': 'Symphox Information Co.', '001669': 'MRV Communication (Networks) LTD', '001668': 'Eishin Electronics', '001640': 'Asmobile Communication Inc.', '00163C': 'Rebox B.V.', '00167D': 'Sky-Line Information Co., Ltd.', '001677': 'Bihl + Wiedemann GmbH', '001655': 'FUHO TECHNOLOGY Co., LTD', '001646': 'Cisco Systems, Inc', '001648': 'SSD Company Limited', '001672': 'Zenway enterprise ltd', '00165A': 'Harman Specialty Group', '001659': 'Z.M.P. RADWAG', '0016A2': 'CentraLite Systems, Inc.', '001695': 'AVC Technology (International) Limited', '0015D8': 'Interlink Electronics', '0015D2': 'Xantech Corporation', '0015D4': 'Emitor AB', '0015D5': 'NICEVT', '00160C': 'LPL DEVELOPMENT S.A. DE C.V', '001612': 'Otsuka Electronics Co., Ltd.', '00160B': 'TVWorks LLC', '001603': 'COOLKSKY Co., LTD', '0015EA': 'Tellumat (Pty) Ltd', '0015E2': 'Dr.Ing. Herbert Knauer GmbH', '0015E1': 'Picochip Ltd', '0015DF': 'Clivet S.p.A.', '00161D': 'Innovative Wireless Technologies, Inc.', '001611': 'Altecon Srl', '001609': 'Unitech electronics co., ltd.', '0015F5': 'Sustainable Energy Systems', '0015F1': 'KYLINK Communications Corp.', '001623': 'Interval Media', '001619': 'Lancelan Technologies S.L.', '001625': 'Impinj, Inc.', '001586': 'Xiamen Overseas Chinese Electronic Co., Ltd.', '00157E': 'Weidmüller Interface GmbH & Co. KG', '001580': 'U-WAY CORPORATION', '00157C': 'Dave Networks, Inc.', '00157F': 'ChuanG International Holding CO.,LTD.', '0015B3': 'Caretech AB', '0015AA': 'Rextechnik International Co.,', '0015A6': 'Digital Electronics Products Ltd.', '00159D': 'Tripp Lite ', '0015D6': 'OSLiNK Sp. 
z o.o.', '001592': 'Facom UK Ltd (Melksham)', '00158B': 'Park Air Systems Ltd', '001576': 'LABiTec - Labor Biomedical Technologies GmbH', '0015BE': 'Iqua Ltd.', '0015C7': 'Cisco Systems, Inc', '00155E': 'Morgan Stanley', '00150B': 'SAGE INFOTECH LTD.', '001507': 'Renaissance Learning Inc', '001508': 'Global Target Enterprise Inc', '001502': 'BETA tech', '0014FD': 'Thecus Technology Corp.', '0014FC': 'Extandon, Inc.', '0014F8': 'Scientific Atlanta', '0014F7': 'CREVIS Co., LTD', '001515': 'Leipold+Co.GmbH', '00150F': 'mingjong', '00155C': 'Dresser Wayne', '001559': 'Securaplane Technologies, Inc.', '001557': 'Olivetti', '001554': 'Atalum Wireless S.A.', '00153B': 'EMH metering GmbH & Co. KG', '001537': 'Ventus Networks', '001533': 'NADAM.CO.,LTD', '001534': 'A Beltrónica-Companhia de Comunicações, Lda', '00153F': 'Alcatel Alenia Space Italia', '001518': 'Shenzhen 10MOONS Technology Development CO.,Ltd', '001526': 'Remote Technologies Inc', '0014F1': 'Cisco Systems, Inc', '0014EA': 'S Digm Inc. (Safe Paradigm Inc.)', '0014E5': 'Alticast', '00149F': 'System and Chips, Inc.', '0014B3': 'CoreStar International Corp', '0014B1': 'Axell Wireless Limited', '0014E0': "LET'S Corporation", '0014E2': 'datacom systems inc.', '0014E4': 'infinias, LLC', '0014CC': 'Zetec, Inc.', '0014CB': 'LifeSync Corporation', '0014C6': 'Quixant Ltd', '001498': 'Viking Design Technology', '001496': 'Phonic Corp.', '001493': 'Systimax Solutions', '0014DB': 'Elma Trenew Electronic GmbH', '00143A': 'RAYTALK INTERNATIONAL SRL', '001436': 'Qwerty Elektronik AB', '00146B': 'Anagran, Inc.', '001461': 'CORONA CORPORATION', '001462': 'Digiwell Technology, inc', '001463': 'IDCS N.V.', '001465': 'Novo Nordisk A/S', '001474': 'K40 Electronics', '00146F': 'Kohler Co', '001466': 'Kleinhenz Elektronik GmbH', '00147F': 'Thomson Telecom Belgium', '001475': 'Wiline Networks, Inc.', '001486': 'Echo Digital Audio Corporation', '001482': 'Aurora Networks', '001455': 'Coder Electronics Corporation', '00144E': 'SRISA', '00148D': 'Cubic Defense Simulation Systems', '00143D': 'Aevoe Inc.', '001415': 'Intec Automation inc.', '001410': 'Suzhou Keda Technology CO.,Ltd', '001417': 'RSE Informations Technologie GmbH', '001433': 'Empower Technologies(Canada) Inc.', '001434': 'Keri Systems, Inc', '0013DE': 'Adapt4, LLC', '0013DD': 'Abbott Diagnostics', '0013D7': 'SPIDCOM Technologies SA', '0013C7': 'IONOS Co.,Ltd.', '001423': 'J-S Co. NEUROCOM', '001425': 'Galactic Computing Corp.', '001419': 'SIDSA', '0013EE': 'JBX Designs Inc.', '0013E5': 'TENOSYS, INC.', '0013E2': 'GeoVision Inc.', '001402': 'kk-electronic a/s', '0013FF': 'Dage-MTI of MC, Inc.', '0013BC': 'Artimi Ltd', '001408': 'Eka Systems Inc.', '0013A7': 'BATTELLE MEMORIAL INSTITUTE', '0013A6': 'Extricom Ltd', '0013A2': 'MaxStream, Inc', '00139F': 'Electronics Design Services, Co., Ltd.', '0013A0': 'ALGOSYSTEM Co., Ltd.', '001398': 'TrafficSim Co.,Ltd', '00139B': 'ioIMAGE Ltd.', '001396': 'Acbel Polytech Inc.', '001393': 'Panta Systems, Inc.', '00138B': 'Phantom Technologies LLC', '001388': 'WiMedia Alliance', '00136E': 'Techmetro Corp.', '00136D': 'Tentaculus AB', '00136A': 'Hach Lange Sarl', '0013B2': 'Carallon Limited', '0013AD': 'Sendo Ltd', '0013AA': 'ALS & TEC Ltd.', '0013A4': 'KeyEye Communications', '00134D': 'Inepro BV', '00134B': 'ToGoldenNet Technology Inc.', '001384': 'Advanced Motion Controls', '00137B': 'Movon Corporation', '001353': 'HYDAC Filtertechnik GMBH', '001363': 'Verascape, Inc.', '001303': 'GateConnect', '001304': 'Flaircomm Technologies Co. 
LTD', '0012F9': 'URYU SEISAKU, LTD.', '0012F3': 'connectBlue AB', '001337': 'Orient Power Home Network Ltd.', '001334': 'Arkados, Inc.', '001332': 'Beijing Topsec Network Security Technology Co., Ltd.', '00131F': 'NxtPhase T&D, Corp.', '0012DC': 'SunCorp Industrial Limited', '0012FF': 'Lely Industries N.V.', '00133A': 'VadaTech Inc.', '00132A': 'Sitronics Telecom Solutions', '0012E5': 'Time America, Inc.', '00130E': 'Focusrite Audio Engineering Limited', '001309': 'Ocean Broadband Networks', '001319': 'Cisco Systems, Inc', '00131C': 'LiteTouch, Inc.', '00134A': 'Engim, Inc.', '0012D7': 'Invento Networks, Inc.', '0012C4': 'Viseon, Inc.', '001293': 'GE Energy', '001294': 'SUMITOMO ELECTRIC DEVICE INNOVATIONS, INC', '001296': 'Addlogix', '0012B3': 'Advance Wireless Technology Corp.', '0012B0': 'Efore Oyj(Plc)', '00127F': 'Cisco Systems, Inc', '0012A6': 'Dolby Australia', '0012A4': 'ThingMagic, LLC', '0012A9': '3Com Ltd', '0012D0': 'Gossen-Metrawatt-GmbH', '001299': 'Ktech Telecommunications Inc', '00128C': 'Woodward Governor', '0012B8': 'G2 Microsystems', '00127B': 'VIA Networking Technologies, Inc.', '001280': 'Cisco Systems, Inc', '001275': 'Sentilla Corporation', '001276': 'CG Power Systems Ireland Limited', '001271': 'Measurement Computing Corp', '001273': 'Stoke Inc', '001269': 'Value Electronics', '001258': 'Activis Polska', '001250': 'Tokyo Aircaft Instrument Co., Ltd.', '001252': 'Citronix, LLC', '001240': 'AMOI ELECTRONICS CO.,LTD', '00122E': 'Signal Technology - AISD', '001264': 'daum electronic gmbh', '001261': 'Adaptix, Inc', '001260': 'Stanton Magnetics,inc.', '001231': 'Motion Control Systems, Inc.', '00124B': 'Texas Instruments', '00124A': 'Dedicated Devices, Inc.', '001243': 'Cisco Systems, Inc', '0011D9': 'TiVo', '0011D2': 'Perception Digital Ltd', '0011CF': 'Thrane & Thrane A/S', '0011D4': 'NetEnrich, Inc', '0011D5': 'Hangzhou Sunyard System Engineering Co.,Ltd.', '0011F8': 'AIRAYA Corp', '0011F4': 'woori-net', '0011F6': 'Asia Pacific Microsystems , Inc.', '0011F0': 'Wideful Limited', '0011F1': 'QinetiQ Ltd', '0011ED': '802 Global', '001211': 'Protechna Herbst GmbH & Co. KG', '001219': 'General Datacomm LLC', '001216': 'ICP Internet Communication Payment AG', '001215': 'iStor Networks, Inc.', '001203': 'ActivNetworks', '0011CC': 'Guangzhou Jinpeng Group Co.,Ltd.', '0011C7': 'Raymarine UK Ltd', '0011C9': 'MTT Corporation', '0011DB': 'Land-Cellular Corporation', '001224': 'NexQL Corporation', '0011BD': 'Bombardier Transportation', '0011AA': 'Uniclass Technology, Co., LTD', '00117E': 'Midmark Corp', '001179': 'Singular Technology Co. Ltd.', '00118A': 'Viewtran Technology Limited', '001184': 'Humo Laboratory,Ltd.', '0011B1': 'BlueExpert Technology Corp.', '0011A6': 'Sypixx Networks', '0011B5': 'Shenzhen Powercom Co.,Ltd', '0011BB': 'Cisco Systems, Inc', '001166': 'Taelim Electronics Co., Ltd.', '001164': 'ACARD Technology Corp.', '0011A2': 'Manufacturing Technology Inc', '00119E': 'Solectron Brazil', '001173': 'SMART Storage Systems', '001125': 'IBM Corp', '00111C': 'Pleora Technologies Inc.', '00111F': 'Doremi Labs, Inc.', '00111D': 'Hectrix Limited', '001119': 'Solteras, Inc.', '001150': 'Belkin Corporation', '001146': 'Telecard-Pribor Ltd', '00110D': 'SANBlaze Technology, Inc.', '001106': 'Siemens NV (Belgium)', '000FF4': 'Guntermann & Drunck GmbH', '000FF8': 'Cisco Systems, Inc', '00112C': 'IZT GmbH', '001114': 'EverFocus Electronics Corp.', '00110E': 'Tsurusaki Sealand Transportation Co. 
Ltd.', '00114C': 'caffeina applied research ltd.', '001141': 'GoodMan Corporation', '001155': 'Sevis Systems', '001152': 'Eidsvoll Electronics AS', '000FCF': 'DataWind Research', '000FD2': 'EWA Technologies, Inc.', '000FCE': 'Kikusui Electronics Corp.', '000FEB': 'Cylon Controls', '000FDC': 'Ueda Japan Radio Co., Ltd.', '000F8E': 'DONGYANG TELECOM CO.,LTD.', '000F91': 'Aerotelecom Co.,Ltd.', '000F87': 'Maxcess International', '000FA1': 'Gigabit Systems Inc.', '000F99': 'APAC opto Electronics Inc.', '000FF5': 'GN&S company', '000FE8': 'Lobos, Inc.', '000FB2': 'Broadband Pacenet (India) Pvt. Ltd.', '000FD7': 'Harman Music Group', '000FD4': 'Soundcraft', '000FAF': 'Dialog Inc.', '000FA5': 'BWA Technology GmbH', '000F80': 'Trinity Security Systems,Inc.', '000F32': 'Lootom Telcovideo Network Wuxi Co Ltd', '000F2A': 'Cableware Electronics', '000F29': 'Augmentix Corporation', '000F27': 'TEAL Electronics, Inc.', '000F43': 'Wasabi Systems Inc.', '000F48': 'Polypix Inc.', '000F50': 'StreamScale Limited', '000F4E': 'Cellink', '000F47': 'ROBOX SPA', '000F18': 'Industrial Control Systems', '000F1D': 'Cosmo Techs Co., Ltd.', '000F1B': 'Ego Systems Inc.', '000F78': 'Datacap Systems Inc', '000F70': 'Wintec Industries, inc.', '000F74': 'Qamcom Technology AB', '000F6D': 'Midas Engineering', '000F5F': 'Nicety Technologies Inc. (NTS)', '000F5A': 'Peribit Networks', '000F31': 'Allied Vision Technologies Canada Inc', '000F73': 'RS Automation Co., Ltd', '000F3C': 'Endeleo Limited', '000EAB': 'Cray Inc', '000EAD': 'Metanoia Technologies, Inc.', '000EAF': 'CASTEL', '000EF8': 'SBC ASI', '000EF9': 'REA Elektronik GmbH', '000EE6': 'Adimos Systems LTD', '000EF6': 'E-TEN Information Systems Co., Ltd.', '000EEA': 'Shadong Luneng Jicheng Electronics,Co.,Ltd', '000F0F': 'Real ID Technology Co., Ltd.', '000F16': 'JAY HOW TECHNOLOGY CO.,', '000EC6': 'ASIX ELECTRONICS CORP.', '000EBF': 'Remsdaq Limited', '000EFF': 'Megasolution,Inc.', '000EE0': 'Mcharge', '000E9F': 'TEMIC SDS GmbH', '000E96': 'Cubic Defense Applications, Inc.', '000E8E': 'SparkLAN Communications, Inc.', '000E91': 'Navico Auckland Ltd', '000E48': 'Lipman TransAction Solutions', '000E3E': 'Sun Optronics Inc', '000E33': 'Shuko Electronics Co.,Ltd', '000E75': 'New York Air Brake Corp.', '000E7C': 'Televes S.A.', '000E66': 'Hitachi Industry & Control Solutions, Ltd.', '000E68': 'E-TOP Network Technology Inc.', '000E5E': 'Raisecom Technology', '000E56': '4G Systems GmbH & Co. KG', '000E55': 'AUVITRAN', '000E73': 'Tpack A/S', '000E72': 'CTS electronics', '000E6E': 'MAT S.A. 
(Mircrelec Advanced Technology)', '000E84': 'Cisco Systems, Inc', '000E87': 'adp Gauselmann GmbH', '000E92': 'Open Telecom', '000E53': 'AV TECH CORPORATION', '000DF9': 'NDS Limited', '000DFD': 'Huges Hi-Tech Inc.,', '000DFB': 'Komax AG', '000E00': 'Atrie', '000DF4': 'Watertek Co.', '000DFA': 'Micro Control Systems Ltd.', '000DFC': 'ITFOR Inc.', '000DFE': 'Hauppauge Computer Works, Inc.', '000DD6': 'ITI LTD', '000DD5': "O'RITE TECHNOLOGY CO.,LTD", '000E0F': 'ERMME', '000E10': 'C-guys, Inc.', '000E0A': 'SAKUMA DESIGN OFFICE', '000E0E': 'ESA elettronica S.P.A.', '000E18': 'MyA Technology', '000E14': 'Visionary Solutions, Inc.', '000E1B': 'IAV GmbH', '000E13': 'Accu-Sort Systems inc.', '000DDE': 'Joyteck Co., Ltd.', '000DE2': 'CMZ Sistemi Elettronici', '000DDA': 'ALLIED TELESIS K.K.', '000DCD': 'GROUPE TXCOM', '000DCA': 'Tait Electronics', '000DCF': 'Cidra Corp.', '000E3A': 'Cirrus Logic', '000E3B': 'Hawking Technologies, Inc.', '000DEC': 'Cisco Systems, Inc', '000DF2': 'Private', '000E27': 'Crere Networks, Inc.', '000DA0': 'NEDAP N.V.', '000D8E': 'Koden Electronics Co., Ltd.', '000D8A': 'Winners Electronics Co., Ltd.', '000D7E': 'Axiowave Networks, Inc.', '000D71': 'boca systems', '000D5A': 'Tiesse SpA', '000DB8': 'SCHILLER AG', '000DC4': 'Emcore Corporation', '000D9B': 'Heraeus Electro-Nite International N.V.', '000D7C': 'Codian Ltd', '000D6B': 'Mita-Teknik A/S', '000D43': 'DRS Tactical Systems Inc.', '000D44': 'Audio BU - Logitech', '000D36': 'Wu Han Routon Electronic Co., Ltd', '000D3D': 'Hammerhead Systems, Inc.', '000D3E': 'APLUX Communications Ltd.', '000D0D': 'ITSupported, LLC', '000D06': 'Compulogic Limited', '000D4A': 'Steag ETA-Optik', '000D4F': 'Kenwood Corporation', '000D47': 'Collex', '000D61': 'Giga-Byte Technology Co., Ltd.', '000D3B': 'Microelectronics Technology Inc.', '000D2D': 'NCT Deutschland GmbH', '000D1E': 'Control Techniques', '000D52': 'Comart system', '000D1A': 'Mustek System Inc.', '000CB8': 'MEDION AG', '000CBB': 'ISKRAEMECO', '000CC0': 'Genera Oy', '000CA8': 'Garuda Networks Corporation', '000D03': 'Matrics, Inc.', '000CFF': 'MRO-TEK Realty Limited', '000CFA': 'Digital Systems Corp', '000CFD': 'Hyundai ImageQuest Co.,Ltd.', '000CD3': 'Prettl Elektronik Radeberg GmbH', '000CD7': 'Nallatech Ltd', '000CD4': 'Positron Public Safety Systems inc.', '000CD6': 'PARTNER TECH', '000CB9': 'LEA', '000CBD': 'Interface Masters, Inc', '000CB2': 'UNION co., ltd.', '000CEB': 'CNMP Networks, Inc.', '000CCC': 'Aeroscout Ltd.', '000CC7': 'Intelligent Computer Solutions Inc.', '000CBE': 'Innominate Security Technologies AG', '000CA7': 'Metro (Suzhou) Technologies Co., Ltd.', '000CEF': 'Open Networks Engineering Ltd', '000C64': 'X2 MSA Group', '000CA0': 'StorCase Technology, Inc.', '000C99': 'HITEL LINK Co.,Ltd', '000C5A': 'IBSmm Embedded Electronics Consulting', '000C5E': 'Calypso Medical', '000C61': 'AC Tech corporation DBA Advanced Digital', '000C5F': 'Avtec, Inc.', '000C4B': 'Cheops Elektronik', '000C45': 'Animation Technologies Inc.', '000C3C': 'MediaChorus, Inc.', '000C7C': 'Internet Information Image Inc.', '000C7B': 'ALPHA PROJECT Co.,Ltd.', '000C77': 'Life Racing Ltd', '000C69': 'National Radio Astronomy Observatory', '000C66': 'Pronto Networks Inc', '000C88': 'Apache Micro Peripherals, Inc.', '000C82': 'NETWORK TECHNOLOGIES INC', '000C8D': 'MATRIX VISION GmbH', '000C89': 'AC Electric Vehicles, Ltd.', '000C4E': 'Winbest Technology CO,LT', '000BFE': 'CASTEL Broadband Limited', '000BF5': 'Shanghai Sibo Telecom Technology Co.,Ltd', '000C27': 'Sammy Corporation', '000C2A': 'OCTTEL 
Communication Co., Ltd.', '000C1C': 'MicroWeb Co., Ltd.', '000BDF': 'Shenzhen RouterD Networks Limited', '000BE6': 'Datel Electronics', '000BF2': 'Chih-Kan Technology Co., Ltd.', '000BEB': 'Systegra AG', '000BEF': 'Code Corporation', '000C05': 'RPA Reserch Co., Ltd.', '000C22': 'Double D Electronics Ltd', '000C0F': 'Techno-One Co., Ltd', '000C38': 'TelcoBridges Inc.', '000BAF': 'WOOJU COMMUNICATIONS Co,.Ltd', '000BB6': 'Metalligence Technology Corp.', '000BB3': 'RiT technologies Ltd.', '000BB7': 'Micro Systems Co.,Ltd.', '000BBA': 'Harmonic, Inc', '000B62': 'ib-mohnen KG', '000B64': 'Kieback & Peter GmbH & Co KG', '000B67': 'Topview Technology Corporation', '000B7D': 'SOLOMON EXTREME INTERNATIONAL LTD.', '000B94': 'Digital Monitoring Products, Inc.', '000BAE': 'Vitals System Inc.', '000BD9': 'General Hydrogen', '000BAB': 'Advantech Technology (CHINA) Co., Ltd.', '000B6D': 'SOLECTRON JAPAN NAKANIIDA', '000BC4': 'BIOTRONIK GmbH & Co', '000B57': 'Silicon Laboratories', '000B51': 'Micetek International Inc.', '000B53': 'INITIUM Co., Ltd.', '000AFB': 'Ambri Limited', '000AFF': 'Kilchherr Elektronik AG', '000B4A': 'Visimetrics (UK) Ltd', '000B48': 'sofrel', '000B1E': 'KAPPA opto-electronics GmbH', '000B1C': 'SIBCO bv', '000B37': 'MANUFACTURE DES MONTRES ROLEX SA', '000AF8': 'American Telecare Inc.', '000B17': 'MKS Instruments', '000B2D': 'Danfoss Inc.', '000AA3': 'SHIMAFUJI ELECTRIC CO.,LTD.', '000AA7': 'FEI Electron Optics', '000AA6': 'Hochiki Corporation', '000A9A': 'Aiptek International Inc', '000A94': 'ShangHai cellink CO., LTD', '000A97': 'SONICblue, Inc.', '000A92': 'Presonus Corporation', '000A85': "PLAT'C2,Inc", '000AD0': 'Niigata Develoment Center, F.I.T. Co., Ltd.', '000AD4': 'CoreBell Systems Inc.', '000ACA': 'YOKOYAMA SHOKAI CO.,Ltd.', '000ACE': 'RADIANTECH, INC.', '000AC7': 'Unication Group', '000ADE': 'Happy Communication Co., Ltd.', '000AE2': 'Binatone Electronics International, Ltd', '000ADB': 'Trilliant', '000AB8': 'Cisco Systems, Inc', '000AAC': 'TerraTec Electronic GmbH', '000ABF': 'HIROTA SS', '000ABC': 'Seabridge Ltd.', '000A50': 'REMOTEK CORPORATION', '000A58': 'Freyer & Siegel Elektronik GmbH & Co. 
KG', '000A4E': 'UNITEK Electronics INC.', '000A62': 'Crinis Networks, Inc.', '000A6A': 'SVM Microwaves s.r.o.', '000A66': 'MITSUBISHI ELECTRIC SYSTEM & SERVICE CO.,LTD.', '000A31': 'HCV Consulting', '004252': 'RLX Technologies', '000A70': 'MPLS Forum', '000A72': 'STEC, INC.', '000A3D': 'Elo Sistemas Eletronicos S.A.', '000A46': 'ARO WELDING TECHNOLOGIES SAS', '000A71': 'Avrio Technologies, Inc', '000A64': 'Eracom Technologies', '000A83': 'SALTO SYSTEMS S.L.', '000A86': 'Lenze', '000A3F': 'Data East Corporation', '000A0C': 'Scientific Research Corporation', '0009F6': 'Shenzhen Eastern Digital Tech Ltd.', '000A20': 'SVA Networks, Inc.', '000A24': 'Octave Communications', '000A19': 'Valere Power, Inc.', '0009E5': 'Hottinger Baldwin Messtechnik GmbH', '0009DE': 'Samjin Information & Communications Co., Ltd.', '0009E0': 'XEMICS S.A.', '000A01': 'SOHOware, Inc.', '0009EC': 'Daktronics, Inc.', '0009EE': 'MEIKYO ELECTRIC CO.,LTD', '0009CA': 'iMaxNetworks(Shenzhen)Limited.', '0009CF': 'iAd GmbH', '000A11': 'ExPet Technologies, Inc', '000A0F': 'Ilryung Telesys, Inc', '0009E7': 'ADC Techonology', '000993': 'Visteon Corporation', '000999': 'CP GEORGES RENAULT', '000994': 'Cronyx Engineering', '0009B9': 'Action Imaging Solutions', '0009AC': 'LANVOICE', '0009B1': 'Kanematsu Electronics, Ltd.', '0009B0': 'Onkyo Corporation', '000979': 'Advanced Television Systems Committee, Inc.', '000963': 'Dominion Lasercom Inc.', '000966': 'TRIMBLE EUROPE BV', '0009C1': 'PROCES-DATA A/S', '0009BB': 'MathStar, Inc.', '000968': 'TECHNOVENTURE, INC.', '000961': 'Switchgear and Instrumentation Ltd', '00097C': 'Cisco Systems, Inc', '00097B': 'Cisco Systems, Inc', '00099D': 'Haliplex Communications', '00099E': 'Testech, Inc.', '000988': 'Nudian Electron Co., Ltd.', '00098E': 'ipcas GmbH', '0009AB': 'Netcontrol Oy', '000960': 'YOZAN Inc.', '000956': 'Network Systems Group, Ltd. (NSG)', '000900': 'TMT', '000901': 'Shenzhen Shixuntong Information & Technoligy Co', '000913': 'SystemK Corporation', '00090E': 'Helix Technology Inc.', '00093C': 'Jacques Technologies P/L', '000935': 'Sandvine Incorporated', '000936': 'Ipetronik GmbH & Co. 
KG', '000937': 'Inventec Appliance Corp', '000946': 'Cluster Labs GmbH', '00093F': 'Double-Win Enterpirse CO., LTD', '000948': 'Vista Control Systems, Corp.', '000949': 'Glyph Technologies Inc.', '00092A': 'MYTECS Co.,Ltd.', '000925': 'VSN Systemen BV', '0008E0': 'ATO Technology Ltd.', '0008E4': 'Envenergy Inc', '0008E3': 'Cisco Systems, Inc', '0008E5': 'IDK Corporation', '0008D9': 'Mitadenshi Co.,LTD', '000891': 'Lyan Inc.', '000892': 'EM Solutions', '00088C': 'Quanta Network Systems Inc.', '00088A': 'Minds@Work', '0008FD': 'BlueKorea Co., Ltd.', '0008F8': 'UTC CCS', '0008D5': 'Vanguard Networks Solutions, LLC', '0008CD': 'With-Net Inc', '0008CC': 'Remotec, Inc.', '0008D1': 'KAREL INC.', '0008A7': 'iLogic Inc.', '000899': 'Netbind, Inc.', '0008A0': 'Stotz Feinmesstechnik GmbH', '0008BC': 'Ilevo AB', '0008BD': 'TEPG-US', '0008AE': 'PacketFront Network Products AB', '0008C3': 'Contex A/S', '0008F3': 'WANY', '0008DE': '3UP Systems', '000822': 'InPro Comm', '000823': 'Texa Corp.', '00081D': 'Ipsil, Incorporated', '00082D': 'Indus Teqsite Private Limited', '000820': 'Cisco Systems, Inc', '000828': 'Koei Engineering Ltd.', '000824': 'Nuance Document Imaging', '00086C': 'Plasmon LMS', '000868': 'PurOptix', '000869': 'Command-e Technology Co.,Ltd.', '000862': 'NEC Eluminant Technologies, Inc.', '000803': 'Cos Tron', '000805': 'Techno-Holon Corporation', '000808': 'PPT Vision, Inc.', '000814': 'TIL Technologies', '00085C': 'Shanghai Dare Technologies Co. Ltd.', '00082C': 'Homag AG', '000821': 'Cisco Systems, Inc', '000887': 'Maschinenfabrik Reinhausen GmbH', '000877': 'Liebert-Hiross Spa', '00087B': 'RTX Telecom A/S', '000861': 'SoftEnergy Co., Ltd.', '00084F': 'Qualstar Corporation', '000854': 'Netronix, Inc.', '000876': 'SDSystem', '000870': 'Rasvia Systems, Inc.', '00086E': 'Hyglo AB', '0007FD': 'LANergy Ltd.', '0007FE': 'Rigaku Corporation', '00047D': 'Pelco', '0007BF': 'Armillaire Technologies, Inc.', '0007BB': 'Candera Inc.', '0007BD': 'Radionet Ltd.', '0007C4': 'JEAN Co. Ltd.', '0007B6': 'Telecom Technology Ltd.', '0007B7': 'Samurai Ind. Prods Eletronicos Ltda', '0007B0': 'Office Details, Inc.', '0007D9': 'Splicecom', '0007DA': 'Neuro Telecom Co., Ltd.', '0007CD': 'Kumoh Electronic Co, Ltd', '0007CF': 'Anoto AB', '0007D2': 'Logopak Systeme GmbH & Co. KG', '0007C9': 'Technol Seven Co., Ltd.', '0007C7': 'Synectics Systems Limited', '0007C3': 'Thomson', '0007A5': 'Y.D.K Co. Ltd.', '00079C': 'Golden Electronics Technology Co., Ltd.', '0007E4': 'SoftRadio Co., Ltd.', '00078E': 'Garz & Friche GmbH', '000754': 'Xyterra Computing, Inc.', '000757': 'Topcall International AG', '000753': 'Beijing Qxcomm Technology Co., Ltd.', '00074C': 'Beicom Inc.', '00074D': 'Zebra Technologies Corp.', '000732': 'AAEON Technology Inc.', '000725': 'Bematech International Corp.', '000723': 'ELCON Systemtechnik GmbH', '00071D': 'Satelsa Sistemas Y Aplicaciones De Telecomunicaciones, S.A.', '000720': 'Trutzschler GmbH & Co. KG', '00076E': 'Sinetica Corporation Limited', '00076F': 'Synoptics Limited', '000773': 'Ascom Powerline Communications Ltd.', '00076C': 'Daehanet, Inc.', '00075D': 'Celleritas Inc.', '00073F': 'Woojyun Systec Co., Ltd.', '000728': 'Neo Telecom', '00072C': 'Fabricom', '00072D': 'CNSystems', '00072F': 'Intransa, Inc.', '000780': 'Bluegiga Technologies OY', '000777': 'Motah Ltd.', '000724': 'Telemax Co., Ltd.', '00071B': 'CDVI Americas Ltd', '000715': 'General Research of Electronics, Inc.', '000737': 'Soriya Co. 
Ltd.', '000734': 'ONStor, Inc.', '000765': 'Jade Quantum Technologies, Inc.', '0005EA': 'Rednix', '0006C9': 'Technical Marketing Research, Inc.', '0006C8': 'Sumitomo Metal Micro Devices, Inc.', '0006F1': 'Optillion', '0006A7': 'Primarion', '0006A9': 'Universal Instruments Corp.', '00069E': 'UNIQA, Inc.', '0006B1': 'Sonicwall', '0006AD': 'KB Electronics Ltd.', '0006AF': 'Xalted Networks', '0006D7': 'Cisco Systems, Inc', '0006D5': 'Diamond Systems Corp.', '0006FD': 'Comjet Information Systems Corp.', '0006F9': 'Mitsui Zosen Systems Research Inc.', '0006C0': 'United Internetworks, Inc.', '0006E0': 'MAT Co., Ltd.', '00D0B9': 'MICROTEK INTERNATIONAL, INC.', '00D05F': 'VALCOM, INC.', '000675': 'Banderacom, Inc.', '000698': 'egnite GmbH', '00069C': 'Transmode Systems AB', '000643': 'SONO Computer Co., Ltd.', '000649': '3M Deutschland GmbH', '00063E': 'Opthos Inc.', '00063B': 'Arcturus Networks Inc.', '00067B': 'Toplink C&C Corporation', '000670': 'Upponetti Oy', '00066F': 'Korea Data Systems', '000668': 'Vicon Industries Inc.', '00066D': 'Compuprint S.P.A.', '000626': 'MWE GmbH', '000620': 'Serial System Ltd.', '000618': 'DigiPower Manufacturing Inc.', '000633': 'Cross Match Technologies GmbH', '000686': 'ZARDCOM Co., Ltd.', '000689': 'yLez Technologies Pte Ltd', '000681': 'Goepel Electronic GmbH', '000658': 'Helmut Fischer GmbH Institut für Elektronik und Messtechnik', '0005D3': 'eProduction Solutions, Inc.', '000604': '@Track Communications, Inc.', '000606': 'RapidWAN, Inc.', '000603': 'Baker Hughes Inc.', '000607': 'Omni Directional Control Technology Inc.', '0005F7': 'Analog Devices, Inc.', '00059C': 'Kleinknecht GmbH, Ing. Büro', '0005AE': 'Mediaport USA', '0005B0': 'Korea Computer Technology Co., Ltd.', '0005B2': 'Medison Co., Ltd.', '000597': 'Eagle Traffic Control Systems', '0005B5': 'Broadcom Technologies', '0005CB': 'ROIS Technologies, Inc.', '0005C8': 'VERYTECH', '0005CD': 'D&M Holdings Inc.', '0005A2': 'CELOX Networks', '0005AA': 'Moore Industries International Inc.', '0005E7': 'Netrake an AudioCodes Company', '0005F4': 'System Base Co., Ltd.', '0005E1': 'Trellis Photonics, Ltd.', '0005E2': 'Creativ Network Technologies', '000613': 'Kawasaki Microelectronics Incorporated', '000617': 'Redswitch Inc.', '000551': 'F & S Elektronik Systeme GmbH', '00054D': 'Brans Technologies, Inc.', '000547': 'Starent Networks', '00054E': 'Philips', '000546': 'KDDI Network & Solultions Inc.', '000540': 'FAST Corporation', '00053C': 'XIRCOM', '000544': 'Valley Technologies, Inc.', '00052E': 'Cinta Networks', '00052F': 'Leviton Network Solutions', '00053B': 'Harbour Networks Ltd., Co. Beijing', '000528': 'New Focus, Inc.', '00056B': 'C.P. 
Technology Co., Ltd.', '000560': 'LEADER COMM.CO., LTD', '00055E': 'Cisco Systems, Inc', '000596': 'Genotech Co., Ltd.', '000579': 'Universal Control Solution Corp.', '00057F': 'Acqis Technology', '000573': 'Cisco Systems, Inc', '000575': 'CDS-Electronics BV', '000556': '360 Systems', '000559': 'Intracom S.A.', '00058C': 'Opentech Inc.', '0004C9': 'Micro Electron Co., Ltd.', '0004BE': 'OptXCon, Inc.', '0004C4': 'Audiotonix Group Limited', '0004C1': 'Cisco Systems, Inc', '0004D6': 'Takagi Industrial Co., Ltd.', '0004D1': 'Drew Technologies, Inc.', '0004D0': 'Softlink s.r.o.', '008087': 'OKI ELECTRIC INDUSTRY CO., LTD', '0004D9': 'Titan Electronics, Inc.', '0004D8': 'IPWireless, Inc.', '000524': 'BTL System (HK) Limited', '000522': 'LEA*D Corporation, Inc.', '000520': 'Smartronix, Inc.', '0004EB': 'Paxonet Communications, Inc.', '0004EF': 'Polestar Corp.', '000514': 'KDT Systems Co., Ltd.', '00050B': 'SICOM Systems, Inc.', '000505': 'Systems Integration Solutions, Inc.', '0004FE': 'Pelago Networks', '000518': 'Jupiters Technology', '0004B7': 'AMB i.t. Holding', '0004B9': 'S.I. Soubou, Inc.', '0004BB': 'Bardac Corporation', '0004BC': 'Giantec, Inc.', '0004AF': 'Digital Fountain, Inc.', '000456': 'Cambium Networks Limited', '000458': 'Fusion X Co., Ltd.', '00044F': 'Schubert System Elektronik Gmbh', '000446': 'CYZENTECH Co., Ltd.', '00044A': 'iPolicy Networks, Inc.', '000440': 'cyberPIXIE, Inc.', '00043C': 'SONOS Co., Ltd.', '0004B2': 'ESSEGI SRL', '0004B4': 'CIAC', '0004AD': 'Malibu Networks', '0004A9': 'SandStream Technologies, Inc.', '000461': 'EPOX Computer Co., Ltd.', '000462': 'DAKOS Data & Communication Co., Ltd.', '00045F': 'Avalue Technology, Inc.', '00049C': 'Surgient Networks, Inc.', '00049D': 'Ipanema Technologies', '000460': 'Knilink Technology, Inc.', '00048F': 'TD Systems Corporation', '000495': 'Tejas Networks India Limited', '00046F': 'Digitel S/A Industria Eletronica', '000469': 'Innocom, Inc.', '000487': 'Cogency Semiconductor, Inc.', '000468': 'Vivity, Inc.', '0003E1': 'Winmate Communication, Inc.', '0003E4': 'Cisco Systems, Inc', '0003DC': 'Lexar Media, Inc.', '0003D8': 'iMPath Networks, Inc.', '0003D5': 'Advanced Communications Co., Ltd.', '0003F7': 'Plast-Control GmbH', '0003FC': 'Intertex Data AB', '0003EF': 'Oneline AG', '0003F1': 'Cicada Semiconductor, Inc.', '000430': 'Netgem', '00042C': 'Minet, Inc.', '00042A': 'Wireless Networks, Inc.', '00042B': 'IT Access Co., Ltd.', '0003D6': 'RADVision, Ltd.', '0003D4': 'Alloptic, Inc.', '0003CE': 'ETEN Technologies, Inc.', '0003C8': 'CML Emergency Services', '0003C3': 'Micronik Multimedia', '000419': 'Fibercycle Networks, Inc.', '00041C': 'ipDialog, Inc.', '000418': 'Teltronic S.A.U.', '000370': 'NXTV, Inc.', '000402': 'Nexsan Technologies, Ltd.', '0003ED': 'Shinkawa Electric Co., Ltd.', '0003BD': 'OmniCluster Technologies, Inc.', '0003C0': 'RFTNC Co., Ltd.', '0003B8': 'NetKit Solutions, LLC', '0003B7': 'ZACCESS Systems', '0003A0': 'Cisco Systems, Inc', '0003A2': 'Catapult Communications', '00039C': 'OptiMight Communications, Inc.', '0003B0': 'Xsense Technology Corp.', '0003AA': 'Watlow', '0003A8': 'IDOT Computers, Inc.', '000355': 'TeraBeam Internet Systems', '000369': 'Nippon Antenna Co., Ltd.', '000373': 'Aselsan A.S', '000377': 'Gigabit Wireless', '000365': 'Kira Information & Communications, Ltd.', '000385': 'Actelis Networks, Inc.', '000354': 'Fiber Logic Communications', '000350': 'BTICINO SPA', '000351': 'Diebold, Inc.', '00034E': 'Pos Data Company, Ltd.', '000348': 'Norscan Instruments, Ltd.', '000346': 'Hitachi 
Kokusai Electric, Inc.', '000344': 'Tietech.Co., Ltd.', '0002E6': 'Gould Instrument Systems, Inc.', '0002E4': 'JC HYUN Systems, Inc.', '0002DE': 'Astrodesign, Inc.', '0002E2': 'NDC Infared Engineering', '0002E1': 'Integrated Network Corporation', '0002F7': 'ARM', '00D024': 'Cognex Corporation', '0002F1': 'Pinetron Co., Ltd.', '0002ED': 'DXO Telecom Co., Ltd.', '0002EC': 'Maschoff Design Engineering', '000336': 'Zetes Technologies', '000337': 'Vaone, Inc.', '00033B': 'TAMI Tech Co., Ltd.', '00032D': 'IBASE Technology, Inc.', '00B052': 'Atheros Communications', '000343': 'Martin Professional A/S', '000335': 'Mirae Technology', '0002F8': 'SEAKR Engineering, Inc.', '000314': 'Teleware Network Systems', '00032F': 'Global Sun Technology, Inc.', '0002DC': 'Fujitsu General Limited', '0002D7': 'EMPEG Ltd', '0002D3': 'NetBotz, Inc.', '0002DA': 'ExiO Communications, Inc.', '0002D4': 'PDA Peripherals, Inc.', '0002D6': 'NICE Systems', '00027D': 'Cisco Systems, Inc', '00027C': 'Trilithic, Inc.', '00028B': 'VDSL Systems OY', '00028C': 'Micrel-Synergy Semiconductor', '00028D': 'Movita Technologies, Inc.', '000298': 'Broadframe Corporation', '000297': 'C-COR.net', '000291': 'Open Network Co., Ltd.', '0002C1': 'Innovative Electronic Designs, Inc.', '0002C0': 'Bencent Tzeng Industry Co., Ltd.', '000299': 'Apex, Inc.', '000275': 'SMART Technologies, Inc.', '0002C6': 'Data Track Technology PLC', '0002A6': 'Effinet Systems Co., Ltd.', '00025F': 'Nortel Networks', '00025C': 'SCI Systems (Kunshan) Co., Ltd.', '000087': 'HITACHI, LTD.', '000258': 'Flying Packets Communications', '00024E': 'Datacard Group', '000242': 'Videoframe Systems', '000244': 'SURECOM Technology Co.', '00023E': 'Selta Telematica S.p.a', '000241': 'Amer.com', '000207': 'VisionGlobal Network Corp.', '000208': 'Unify Networks, Inc.', '000204': 'Bodmann Industries Elektronik GmbH', '000229': 'Adtec Corporation', '00022D': 'Agere Systems', '000226': 'XESystems, Inc.', '000225': 'One Stop Systems', '000211': 'Nature Worldwide Technology Corp.', '000212': 'SierraCom', '000217': 'Cisco Systems, Inc', '00017A': 'Chengdu Maipu Electric Industrial Co., Ltd.', '000238': 'Serome Technology, Inc.', '000269': 'Nadatel Co., Ltd', '000264': 'AudioRamp.com', '000255': 'IBM Corp', '000252': 'Carrier Corporation', '000220': 'CANON FINETECH INC.', '00020D': 'Micronpc.com', '0001D4': 'Leisure Time, Inc.', '0001DD': 'Avail Networks', '0001D5': 'HAEDONG INFO & COMM CO., LTD', '0001D7': 'F5 Networks, Inc.', '0001DE': 'Trango Systems, Inc.', '0001DC': 'Activetelco', '0001F9': 'TeraGlobal Communications Corp.', '0001FB': 'DoTop Technology, Inc.', '0001F8': 'TEXIO TECHNOLOGY CORPORATION', '0001BE': 'Gigalink Co., Ltd.', '0001B1': 'General Bandwidth', '0001BF': 'Teleforce Co., Ltd.', '0001B4': 'Wayport, Inc.', '0001BA': 'IC-Net, Inc.', '0001CF': 'Alpha Data Parallel Systems, Ltd.', '0001D0': 'VitalPoint, Inc.', '0001B0': 'Fulltek Technology Co., Ltd.', '0001A9': 'BMW AG', '0001AA': 'Airspan Communications, Ltd.', '00019E': 'ESS Technology, Inc.', '00017D': 'ThermoQuest', '000181': 'Nortel Networks', '000194': 'Capital Equipment Corporation', '000198': 'Darim Vision', '0001E8': 'Force10 Networks, Inc.', '0001E9': 'Litton Marine Systems B.V.', '0001E5': 'Supernet, Inc.', '0001A2': 'Logical Co., Ltd.', '000185': 'Hitachi Aloka Medical, Ltd.', '0001C7': 'Cisco Systems, Inc', '000121': 'WatchGuard Technologies, Inc.', '000129': 'DFI Inc.', '000119': 'RTUnet (Australia)', '000122': 'Trend Communications, Ltd.', '00011A': 'Hoffmann und Burmeister GbR', '00010A': 'CIS TECHNOLOGY 
INC.', '000167': 'HIOKI E.E. CORPORATION', '000168': 'VITANA CORPORATION', '000162': 'Cygnet Technologies, Inc.', '000154': 'G3M Corporation', '000152': 'CHROMATEK INC.', '000150': 'GILAT COMMUNICATIONS, LTD.', '000151': 'Ensemble Communications', '000115': 'EXTRATECH CORPORATION', '000101': 'Private', '00010D': 'Teledyne DALSA Inc.', '000105': 'Beckhoff Automation GmbH', '00B017': 'InfoGear Technology Corp.', '00012C': 'Aravox Technologies, Inc.', '000142': 'Cisco Systems, Inc', '000164': 'Cisco Systems, Inc', '00015F': 'DIGITAL DESIGN GmbH', '00012B': 'TELENET Co., Ltd.', '00013F': 'Neighbor World Co., Ltd.', '000124': 'Acer Incorporated', '003088': 'Ericsson', '003020': 'TSI, Inc..', '003095': 'Procomp Informatics, Ltd.', '0030CA': 'Discovery Com', '0030CE': 'Zaffire', '00307B': 'Cisco Systems, Inc', '0030B5': 'Tadiran Microwave Networks', '0030B8': 'RiverDelta Networks', '003071': 'Cisco Systems, Inc', '00303A': 'MAATEL', '00304E': 'BUSTEC PRODUCTION LTD.', '0030A4': 'Woodwind Communications System', '00303B': 'PowerCom Technology', '0030BC': 'Optronic AG', '00B02D': 'ViaGate Technologies, Inc.', '0030EE': 'DSG Technology, Inc.', '00309E': 'WORKBIT CORPORATION.', '0030DE': 'WAGO Kontakttechnik GmbH', '00303E': 'Radcom Ltd.', '0030D7': 'Innovative Systems, L.L.C.', '00B0CE': 'Viveris Technologies', '00B01C': 'Westport Technologies', '00B04A': 'Cisco Systems, Inc', '00B048': 'Marconi Communications Inc.', '00301B': 'SHUTTLE, INC.', '003021': 'HSING TECH. ENTERPRISE CO.,LTD', '00302C': 'SYLANTRO SYSTEMS CORPORATION', '0030DF': 'KB/TEL TELECOMUNICACIONES', '003030': 'HARMONIX CORPORATION', '003063': 'SANTERA SYSTEMS, INC.', '0030A3': 'Cisco Systems, Inc', '0030DD': 'INDIGITA CORPORATION', '003099': 'BOENIG UND KALLENBACH OHG', '0030F2': 'Cisco Systems, Inc', '003051': 'ORBIT AVIONIC & COMMUNICATION', '00308E': 'CROSS MATCH TECHNOLOGIES, INC.', '003027': 'KERBANGO, INC.', '003033': 'ORIENT TELECOM CO., LTD.', '003008': 'AVIO DIGITAL, INC.', '00301D': 'SKYSTREAM, INC.', '0030BA': 'AC&T SYSTEM CO., LTD.', '0030FD': 'INTEGRATED SYSTEMS DESIGN', '0030B9': 'ECTEL', '00307D': 'GRE AMERICA, INC.', '0030EF': 'NEON TECHNOLOGY, INC.', '003096': 'Cisco Systems, Inc', '003039': 'SOFTBOOK PRESS', '00D0F8': 'FUJIAN STAR TERMINAL', '00D0ED': 'XIOX', '00D097': 'Cisco Systems, Inc', '00D08E': 'Grass Valley, A Belden Brand', '00D056': 'SOMAT CORPORATION', '00D0E0': 'DOOIN ELECTRONICS CO.', '00D000': 'FERRAN SCIENTIFIC, INC.', '00D0D0': 'ZHONGXING TELECOM LTD.', '00D053': 'CONNECTED SYSTEMS', '00D033': 'DALIAN DAXIAN NETWORK', '00D0D6': 'AETHRA TELECOMUNICAZIONI', '00D063': 'Cisco Systems, Inc', '00D047': 'XN TECHNOLOGIES', '00D055': 'KATHREIN-WERKE KG', '00D03B': 'VISION PRODUCTS PTY. 
LTD.', '00D0B3': 'DRS Technologies Canada Ltd', '00D0AF': 'CUTLER-HAMMER, INC.', '00D052': 'ASCEND COMMUNICATIONS, INC.', '00D0AD': 'TL INDUSTRIES', '00D0A4': 'ALANTRO COMMUNICATIONS', '00D0B0': 'BITSWITCH LTD.', '00D030': 'Safetran Systems Corp', '00302A': 'SOUTHERN INFORMATION', '0030E1': 'Network Equipment Technologies, Inc.', '00302B': 'INALP NETWORKS, INC.', '003001': 'SMP', '00D08B': 'ADVA Optical Networking Ltd.', '00D0E4': 'Cisco Systems, Inc', '00D05A': 'SYMBIONICS, LTD.', '00D079': 'Cisco Systems, Inc', '00D021': 'REGENT ELECTRONICS CORP.', '00D09F': 'NOVTEK TEST SYSTEMS', '00D0FE': 'ASTRAL POINT', '00D0D4': 'V-BITS, INC.', '00D084': 'NEXCOMM SYSTEMS, INC.', '00D099': 'Elcard Wireless Systems Oy', '00D0E7': 'VCON TELECOMMUNICATION LTD.', '00D01B': 'MIMAKI ENGINEERING CO., LTD.', '00D00D': 'MICROMERITICS INSTRUMENT', '00D054': 'SAS INSTITUTE INC.', '00D009': 'HSING TECH. ENTERPRISE CO. LTD', '00D0F4': 'CARINTHIAN TECH INSTITUTE', '00D07D': 'COSINE COMMUNICATIONS', '00D083': 'INVERTEX, INC.', '00D0BA': 'Cisco Systems, Inc', '00D098': 'Photon Dynamics Canada Inc.', '00D0BE': 'EMUTEC INC.', '00D092': 'GLENAYRE WESTERN MULTIPLEX', '00509D': 'THE INDUSTREE B.V.', '00D0B8': 'Iomega Corporation', '0050F1': 'Intel Corporation', '0050CB': 'JETTER', '005058': 'Sangoma Technologies', '005074': 'ADVANCED HI-TECH CORP.', '00500A': 'IRIS TECHNOLOGIES, INC.', '00506D': 'VIDEOJET SYSTEMS', '0050CA': 'NET TO NET TECHNOLOGIES', '00D0C7': 'PATHWAY, INC.', '00D07A': 'AMAQUEST COMPUTER CORP.', '00503F': 'ANCHOR GAMES', '005032': 'PICAZO COMMUNICATIONS, INC.', '00D04A': 'PRESENCE TECHNOLOGY GMBH', '00D074': 'TAQUA SYSTEMS, INC.', '00504D': 'Tokyo Electron Device Limited', '005070': 'CHAINTECH COMPUTER CO., LTD.', '005023': 'PG DESIGN ELECTRONICS, INC.', '00509E': 'Les Technologies SoftAcoustik Inc.', '005071': 'AIWA CO., LTD.', '00505F': 'BRAND INNOVATORS', '0050B4': 'SATCHWELL CONTROL SYSTEMS, LTD', '0050D6': 'ATLAS COPCO TOOLS AB', '005082': 'FORESSON CORPORATION', '0050DF': 'AirFiber, Inc.', '0050C5': 'ADS Technologies, Inc', '00508E': 'OPTIMATION, INC.', '005028': 'AVAL COMMUNICATIONS', '00502F': 'TollBridge Technologies, Inc.', '0050FE': 'PCTVnet ASA', '0050AB': 'NALTEC, Inc.', '005037': 'KOGA ELECTRONICS CO.', '0050A8': 'OpenCon Systems, Inc.', '00509C': 'BETA RESEARCH', '0050B1': 'GIDDINGS & LEWIS', '005006': 'TAC AB', '005009': 'PHILIPS BROADBAND NETWORKS', '005030': 'FUTURE PLUS SYSTEMS', '005078': 'MEGATON HOUSE, LTD.', '005002': 'OMNISEC AG', '00506A': 'EDEVA, INC.', '0050AA': 'KONICA MINOLTA HOLDINGS, INC.', '005038': 'DAIN TELECOM CO., LTD.', '0050B7': 'BOSER TECHNOLOGY CO., LTD.', '009088': 'BAXALL SECURITY LTD.', '00906C': 'Sartorius Hamburg GmbH', '0090A4': 'ALTIGA NETWORKS', '0090F9': 'Imagine Communications', '009089': 'SOFTCOM MICROSYSTEMS, INC.', '0090EE': 'PERSONAL COMMUNICATIONS TECHNOLOGIES', '009080': 'NOT LIMITED, INC.', '0090E8': 'MOXA TECHNOLOGIES CORP., LTD.', '0090A1': 'Flying Pig Systems/High End Systems Inc.', '009079': 'ClearOne, Inc.', '00909A': 'ONE WORLD SYSTEMS, INC.', '0090C2': 'JK microsystems, Inc.', '0050D0': 'MINERVA SYSTEMS', '0050D8': 'UNICORN COMPUTER CORP.', '0050B2': 'BRODEL GmbH', '009076': 'FMT AIRCRAFT GATE SUPPORT SYSTEMS AB', '009017': 'Zypcom, Inc', '009049': 'ENTRIDIA CORPORATION', '0090E6': 'ALi Corporation', '009070': 'NEO NETWORKS, INC.', '009030': 'HONEYWELL-DATING', '009008': 'HanA Systems Inc.', '0090AC': 'OPTIVISION, INC.', '00904E': 'DELEM BV', '0090ED': 'CENTRAL SYSTEM RESEARCH CO., LTD.', '00901E': 'Selesta Ingegneria S.p.A.', '009075': 
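# Keys in this table are IEEE OUI prefixes: the first 24 bits of a MAC
# address, written as six upper-case hex digits with no separators. Values
# are the organisation names as registered with the IEEE, kept verbatim
# (including the registry's own spellings and spacing). A minimal lookup
# sketch, assuming the enclosing dict is bound to a name such as
# OUI_TO_VENDOR (hypothetical; the actual binding is outside this fragment):
#     prefix = mac.replace(':', '').replace('-', '').upper()[:6]
#     vendor = OUI_TO_VENDOR.get(prefix, 'Unknown vendor')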
'NEC DO BRASIL S.A.', '0090AD': 'ASPECT ELECTRONICS, INC.', '009001': 'NISHIMU ELECTRONICS INDUSTRIES CO., LTD.', '009043': 'Tattile SRL ', '0090CB': 'Wireless OnLine, Inc.', '001063': 'STARGUIDE DIGITAL NETWORKS', '001023': 'Network Equipment Technologies', '00102B': 'UMAX DATA SYSTEMS, INC.', '00908A': 'BAYLY COMMUNICATIONS, INC.', '00900E': 'HANDLINK TECHNOLOGIES, INC.', '0090C1': 'Peco II, Inc.', '00108D': 'Johnson Controls, Inc.', '001045': 'Nortel Networks', '00107D': 'AURORA COMMUNICATIONS, LTD.', '0090E4': 'NEC AMERICA, INC.', '009040': 'Siemens Network Convergence LLC', '0090C8': 'WAVERIDER COMMUNICATIONS (CANADA) INC.', '00901B': 'DIGITAL CONTROLS', '0090F7': 'NBASE COMMUNICATIONS LTD.', '009012': 'GLOBESPAN SEMICONDUCTOR, INC.', '0090B7': 'DIGITAL LIGHTWAVE, INC.', '0090A0': '8X8 INC.', '009047': 'GIGA FAST E. LTD.', '0090E1': 'TELENA S.P.A.', '009032': 'PELCOMBE GROUP LTD.', '001062': 'NX SERVER, ILNC.', '0010F0': 'RITTAL-WERK RUDOLF LOH GmbH & Co.', '001001': 'Citel', '00105C': 'QUANTUM DESIGNS (H.K.) LTD.', '0010CF': 'FIBERLANE COMMUNICATIONS', '001069': 'HELIOSS COMMUNICATIONS, INC.', '0010BF': 'InterAir Wireless', '001026': 'ACCELERATED NETWORKS, INC.', '001036': 'INTER-TEL INTEGRATED SYSTEMS', '001039': 'Vectron Systems AG', '0010B6': 'ENTRATA COMMUNICATIONS CORP.', '0010EC': 'RPCG, LLC', '001059': 'DIABLO RESEARCH CO. LLC', '0010FC': 'BROADBAND NETWORKS, INC.', '001031': 'OBJECTIVE COMMUNICATIONS, INC.', '00106D': 'Axxcelera Broadband Wireless', '00104C': 'Teledyne LeCroy, Inc', '0010CC': 'CLP COMPUTER LOGISTIK PLANUNG GmbH', '001030': 'EION Inc.', '0010D0': 'WITCOM, LTD.', '001093': 'CMS COMPUTERS, LTD.', '00108F': 'RAPTOR SYSTEMS', '0010A4': 'XIRCOM', '0010F1': 'I-O CORPORATION', '001066': 'ADVANCED CONTROL SYSTEMS, INC.', '0010AC': 'IMCI TECHNOLOGIES', '0010B1': 'FOR-A CO., LTD.', '0010EE': 'CTI PRODUCTS, INC.', '001041': 'BRISTOL BABCOCK, INC.', '0010AA': 'MEDIA4, INC.', '0010E8': 'TELOCITY, INCORPORATED', '0010A2': 'TNS', '001065': 'RADYNE CORPORATION', '00109F': 'PAVO, INC.', '00101D': 'WINBOND ELECTRONICS CORP.', '001084': 'K-BOT COMMUNICATIONS', '001000': 'CABLE TELEVISION LABORATORIES, INC.', '001009': 'HORANET', '0010F8': 'TEXIO TECHNOLOGY CORPORATION', '0010C0': 'ARMA, Inc.', '00105B': 'NET INSIGHT AB', '001002': 'ACTIA', '0010EB': 'SELSIUS SYSTEMS, INC.', '001057': 'Rebel.com, Inc.', '0010F9': 'UNIQUE SYSTEMS, INC.', '001075': 'Segate Technology LLC', '00E003': 'NOKIA WIRELESS BUSINESS COMMUN', '00E0F3': 'WebSprint Communications, Inc.', '08BBCC': 'AK-NORD EDV VERTRIEBSGES. mbH', '00E0DB': 'ViaVideo Communications, Inc.', '00E0A6': 'TELOGY NETWORKS, INC.', '00E09F': 'PIXEL VISION', '00E0CC': 'HERO SYSTEMS, LTD.', '00E080': 'CONTROL RESOURCES CORPORATION', '00E004': 'PMC-SIERRA, INC.', '00E03B': 'PROMINET CORPORATION', '00E0F5': 'TELES AG', '00E0D7': 'SUNSHINE ELECTRONICS, INC.', '00E0B5': 'ARDENT COMMUNICATIONS CORP.', '00E068': 'MERRIMAC SYSTEMS INC.', '00E049': 'MICROWI ELECTRONIC GmbH', '00E095': 'ADVANCED-VISION TECHNOLGIES CORP.', '00E00E': 'AVALON IMAGING SYSTEMS, INC.', '00E048': 'SDL COMMUNICATIONS, INC.', '00E0CB': 'RESON, INC.', '00E0C8': 'VIRTUAL ACCESS, LTD.', '00E006': 'SILICON INTEGRATED SYS. CORP.', '00E0AC': 'MIDSCO, INC.', '00E008': 'AMAZING CONTROLS! 
INC.', '00E0AE': 'XAQTI CORPORATION', '00E0E0': 'SI ELECTRONICS, LTD.', '00E050': 'EXECUTONE INFORMATION SYSTEMS, INC.', '00E023': 'TELRAD', '00E02C': 'AST COMPUTER', '00E067': 'eac AUTOMATION-CONSULTING GmbH', '00E0FA': 'TRL TECHNOLOGY, LTD.', '00E02A': 'TANDBERG TELEVISION AS', '00E04E': 'SANYO DENKI CO., LTD.', '00E012': 'PLUTO TECHNOLOGIES INTERNATIONAL INC.', '00E04C': 'REALTEK SEMICONDUCTOR CORP.', '00E051': 'TALX CORPORATION', '00606B': 'Synclayer Inc.', '00603B': 'AMTEC spa', '00E039': 'PARADYNE CORP.', '00600B': 'LOGWARE GmbH', '00E0C7': 'EUROTECH SRL', '00E0AF': 'GENERAL DYNAMICS INFORMATION SYSTEMS', '00E054': 'KODAI HITEC CO., LTD.', '00E0B9': 'BYAS SYSTEMS', '00604B': 'Safe-com GmbH & Co. KG', '00E0EF': 'DIONEX', '00E02D': 'InnoMediaLogic, Inc.', '00E035': 'Artesyn Embedded Technologies', '00E090': 'BECKMAN LAB. AUTOMATION DIV.', '006001': 'InnoSys, Inc.', '0060FE': 'LYNX SYSTEM DEVELOPERS, INC.', '0060BD': 'Enginuity Communications', '000800': 'MULTITECH SYSTEMS, INC.', '00E085': 'GLOBAL MAINTECH, INC.', '00E0BE': 'GENROCO INTERNATIONAL, INC.', '00E0B6': 'Entrada Networks', '00E0F4': 'INSIDE Technology A/S', '00E0A0': 'WILTRON CO.', '00E0F1': 'THAT CORPORATION', '0060D5': 'AMADA MIYACHI Co., Ltd', '00603F': 'PATAPSCO DESIGNS', '0060B5': 'KEBA GmbH', '006014': 'EDEC CO., LTD.', '0060AC': 'RESILIENCE CORPORATION', '00604E': 'CYCLE COMPUTER CORPORATION, INC.', '0060E1': 'ORCKIT COMMUNICATIONS LTD.', '0060D2': 'LUCENT TECHNOLOGIES TAIWAN TELECOMMUNICATIONS CO., LTD.', '006042': 'TKS (USA), INC.', '006079': 'Mainstream Data, Inc.', '00609A': 'NJK TECHNO CO.', '00602B': 'PEAK AUDIO', '0060F1': 'EXP COMPUTER, INC.', '0060E6': 'SHOMITI SYSTEMS INCORPORATED', '0060FF': 'QuVis, Inc.', '006067': 'ACER NETXUS INC.', '00609F': 'PHAST CORPORATION', '006040': 'NETRO CORP.', '0060CC': 'EMTRAK, INCORPORATED', '00602C': 'LINX Data Terminals, Inc.', '00607E': 'GIGALABS, INC.', '0060CD': 'VideoServer, Inc.', '0060AA': 'INTELLIGENT DEVICES INC. (IDI)', '006025': 'ACTIVE IMAGING PLC', '0060A7': 'MICROSENS GmbH & CO. KG', '0005A8': 'WYLE ELECTRONICS', '0060E5': 'FUJI AUTOMATION CO., LTD.', '00605E': 'LIBERTY TECHNOLOGY NETWORKING', '0060C6': 'DCS AG', '00601E': 'SOFTLAB, INC.', '006065': 'BERNECKER & RAINER INDUSTRIE-ELEKTRONIC GmbH', '00605D': 'SCANIVALVE CORP.', '00606F': 'CLARION CORPORATION OF AMERICA', '00A010': 'SYSLOGIC DATENTECHNIK AG', '00A059': 'HAMILTON HALLMARK', '00A039': 'ROSS TECHNOLOGY, INC.', '00A0AD': 'MARCONI SPA', '00A0D6': 'SBE, Inc.', '00A02E': 'BRAND COMMUNICATIONS, LTD.', '00604A': 'SAIC IDEAS GROUP', '00A0BD': 'I-TECH CORP.', '006090': 'Artiza Networks Inc', '00600D': 'Digital Logic GmbH', '006030': 'VILLAGE TRONIC ENTWICKLUNG', '00A08D': 'JACOMO CORPORATION', '00A08E': 'Check Point Software Technologies', '00A0FC': 'IMAGE SCIENCES, INC.', '00A09C': 'Xyplex, Inc.', '00A00D': 'THE PANDA PROJECT', '00A0E9': 'ELECTRONIC RETAILING SYSTEMS INTERNATIONAL', '00A0BE': 'INTEGRATED CIRCUIT SYSTEMS, INC. 
COMMUNICATIONS GROUP', '00A016': 'MICROPOLIS CORP.', '00A048': 'QUESTECH, LTD.', '00A003': 'Siemens Switzerland Ltd., I B T HVP', '00A0F9': 'BINTEC COMMUNICATIONS GMBH', '00A0F5': 'RADGUARD LTD.', '00A0CA': 'FUJITSU DENSO LTD.', '00A022': 'CENTRE FOR DEVELOPMENT OF ADVANCED COMPUTING', '00A0B6': 'SANRITZ AUTOMATION CO., LTD.', '00A079': 'ALPS ELECTRIC (USA), INC.', '00A0C0': 'DIGITAL LINK CORP.', '00A01E': 'EST CORPORATION', '00A0AE': 'NUCOM SYSTEMS, INC.', '00A062': 'AES PRODATA', '00A076': 'CARDWARE LAB, INC.', '00A0A1': 'EPIC DATA INC.', '00A044': 'NTT IT CO., LTD.', '00A011': 'MUTOH INDUSTRIES LTD.', '00A0BA': 'PATTON ELECTRONICS CO.', '00A0B5': '3H TECHNOLOGY', '00A04D': 'EDA INSTRUMENTS, INC.', '00A086': 'AMBER WAVE SYSTEMS, INC.', '00A0AF': 'WMS INDUSTRIES', '00A057': 'LANCOM Systems GmbH', '00A030': 'CAPTOR NV/SA', '00A0DE': 'YAMAHA CORPORATION', '00A084': 'Dataplex Pty Ltd', '00A049': 'DIGITECH INDUSTRIES, INC.', '00A09D': 'JOHNATHON FREEMAN TECHNOLOGIES', '00A06B': 'DMS DORSCH MIKROSYSTEM GMBH', '00A0F8': 'Zebra Technologies Inc', '00A09F': 'COMMVISION CORP.', '00A06E': 'AUSTRON, INC.', '002022': 'NMS Communications', '0020AE': 'ORNET DATA COMMUNICATION TECH.', '0020AA': 'Ericsson Television Limited', '0020A4': 'MULTIPOINT NETWORKS', '000267': 'NODE RUNNER, INC.', '0020B1': 'COMTECH RESEARCH INC.', '002032': 'ALCATEL TAISEL', '0020E9': 'DANTEL', '002038': 'VME MICROSYSTEMS INTERNATIONAL CORPORATION', '0020A3': 'Harmonic, Inc', '002059': 'MIRO COMPUTER PRODUCTS AG', '002034': 'ROTEC INDUSTRIEAUTOMATION GMBH', '002079': 'MIKRON GMBH', '002005': 'SIMPLE TECHNOLOGY', '002018': 'CIS TECHNOLOGY INC.', '002098': 'HECTRONIC AB', '0020FD': 'ITV TECHNOLOGIES, INC.', '0020FA': 'GDE SYSTEMS, INC.', '0020C1': 'SAXA, Inc.', '002080': 'SYNERGY (UK) LTD.', '00C023': 'TUTANKHAMON ELECTRONICS', '00C08B': 'RISQ MODULAR SYSTEMS, INC.', '0020C4': 'INET,INC.', '002074': 'SUNGWOON SYSTEMS', '00203C': 'EUROTIME AB', '002028': 'WEST EGG SYSTEMS, INC.', '002068': 'ISDYNE', '0020C8': 'LARSCOM INCORPORATED', '00209D': 'LIPPERT AUTOMATIONSTECHNIK', '00209C': 'PRIMARY ACCESS CORP.', '00206D': 'DATA RACE, INC.', '00203A': 'DIGITAL BI0METRICS INC.', '002048': 'Marconi Communications', '0020DC': 'DENSITRON TAIWAN LTD.', '00200C': 'ADASTRA SYSTEMS CORP.', '002011': 'CANOPUS CO., LTD.', '002051': 'Verilink Corporation', '00203B': 'WISDM LTD.', '0020BA': 'CENTER FOR HIGH PERFORMANCE', '0020F5': 'PANDATEL AG', '00200E': 'NSSLGlobal Technologies AS', '0020E7': 'B&W NUCLEAR SERVICE COMPANY', '0020F0': 'UNIVERSAL MICROELECTRONICS CO.', '002089': 'T3PLUS NETWORKING, INC.', '002061': 'GarrettCom, Inc.', '00C080': 'NETSTAR, INC.', '00C0B4': 'MYSON TECHNOLOGY, INC.', '00C045': 'ISOLATION SYSTEMS, LTD.', '0070B3': 'DATA RECALL LTD.', '0070B0': 'M/A-COM INC. COMPANIES', '00E6D3': 'NIXDORF COMPUTER CORP.', '00C0C3': 'ACUSON COMPUTED SONOGRAPHY', '00C0B3': 'COMSTAT DATACOMM CORPORATION', '00C0E5': 'GESPAC, S.A.', '00C04D': 'MITEC, INC.', '00C047': 'UNIMICRO SYSTEMS, INC.', '00C084': 'DATA LINK CORP. LTD.', '00C041': 'DIGITAL TRANSMISSION SYSTEMS', '00C01F': 'S.E.R.C.E.L.', '006086': 'LOGIC REPLACEMENT TECH. 
LTD.', '00C059': 'DENSO CORPORATION', '00C0F1': 'SHINKO ELECTRIC CO., LTD.', '00C0A1': 'TOKYO DENSHI SEKEI CO.', '00C02E': 'NETWIZ', '00C00D': 'ADVANCED LOGIC RESEARCH, INC.', '00C081': 'METRODATA LTD.', '00C03B': 'MULTIACCESS COMPUTING CORP.', '00C082': 'MOORE PRODUCTS CO.', '00C099': 'YOSHIKI INDUSTRIAL CO.,LTD.', '00C001': 'DIATEK PATIENT MANAGMENT', '00C0F4': 'INTERLINK SYSTEM CO., LTD.', '00C0E2': 'CALCOMP, INC.', '00C07B': 'ASCEND COMMUNICATIONS, INC.', '00C03C': 'TOWER TECH S.R.L.', '00C01D': 'GRAND JUNCTION NETWORKS, INC.', '00C035': 'QUINTAR COMPANY', '00C070': 'SECTRA SECURE-TRANSMISSION AB', '00C06D': 'BOCA RESEARCH, INC.', '00C0EA': 'ARRAY TECHNOLOGY LTD.', '00C009': 'KT TECHNOLOGY (S) PTE LTD', '00C0D6': 'J1 SYSTEMS, INC.', '00C0DC': 'EOS TECHNOLOGIES, INC.', '00C072': 'KNX LTD.', '00C0AE': 'TOWERCOM CO. INC. DBA PC HOUSE', '00C0C2': 'INFINITE NETWORKS LTD.', '00C0AF': 'TEKLOGIX INC.', '00C07A': 'PRIVA B.V.', '00C0F6': 'CELAN TECHNOLOGY INC.', '00C0F8': 'ABOUT COMPUTING INC.', '00C078': 'COMPUTER SYSTEMS ENGINEERING', '00C09A': 'PHOTONICS CORPORATION', '00C01A': 'COROMETRICS MEDICAL SYSTEMS', '00C068': 'HME Clear-Com LTD.', '00C0D8': 'UNIVERSAL DATA SYSTEMS', '004036': 'Zoom Telephonics, Inc', '004016': 'ADC - Global Connectivity Solutions Division', '00406A': 'KENTEK INFORMATION SYSTEMS,INC', '00400A': 'PIVOTAL TECHNOLOGIES, INC.', '004099': 'NEWGEN SYSTEMS CORP.', '004011': 'ANDOVER CONTROLS CORPORATION', '0040A1': 'ERGO COMPUTING', '004081': 'MANNESMANN SCANGRAPHIC GMBH', '00C08C': 'PERFORMANCE TECHNOLOGIES, INC.', '00C007': 'PINNACLE DATA SYSTEMS, INC.', '00C098': 'CHUNTEX ELECTRONIC CO., LTD.', '00C0BE': 'ALCATEL - SEL', '00C06E': 'HAFT TECHNOLOGY, INC.', '00C08A': 'Lauterbach GmbH', '00C0F7': 'ENGAGE COMMUNICATION, INC.', '0040B7': 'STEALTH COMPUTER SYSTEMS', '0040AC': 'SUPER WORKSTATION, INC.', '10005A': 'IBM Corp', '0040D1': 'FUKUDA DENSHI CO., LTD.', '004069': 'LEMCOM SYSTEMS, INC.', '00403B': 'SYNERJET INTERNATIONAL CORP.', '00803B': 'APT COMMUNICATIONS, INC.', '00806A': 'ERI (EMPAC RESEARCH INC.)', '00C0A8': 'GVC CORPORATION', '0040E0': 'ATOMWIDE LTD.', '0040A8': 'IMF INTERNATIONAL LTD.', '004070': 'INTERWARE CO., LTD.', '00408A': 'TPS TELEPROCESSING SYS. GMBH', '0040FD': 'LXE', '00403F': 'SSANGYONG COMPUTER SYSTEMS', '004082': 'LABORATORY EQUIPMENT CORP.', '0040F1': 'CHUO ELECTRONICS CO., LTD.', '0040A9': 'DATACOM INC.', '0040E3': 'QUIN SYSTEMS LTD', '004091': 'PROCOMP INDUSTRIA ELETRONICA', '0040EA': 'PLAIN TREE SYSTEMS INC', '0040A7': 'ITAUTEC PHILCO S.A.', '004064': 'KLA INSTRUMENTS CORPORATION', '004043': 'Nokia Siemens Networks GmbH & Co. KG.', '00405A': 'GOLDSTAR INFORMATION & COMM.', '004013': 'NTT DATA COMM. 
SYSTEMS CORP.', '00400C': 'GENERAL MICRO SYSTEMS, INC.', '00405E': 'NORTH HILLS ISRAEL', '0040FA': 'MICROBOARDS, INC.', '004014': 'COMSOFT GMBH', '004000': 'PCI COMPONENTES DA AMZONIA LTD', '00406C': 'COPERNIQUE', '004075': 'Tattile SRL ', '004053': 'AMPRO COMPUTERS', '008038': 'DATA RESEARCH & APPLICATIONS', '00805E': 'LSI LOGIC CORPORATION', '008060': 'NETWORK INTERFACE CORPORATION', '0080C3': 'BICC INFORMATION SYSTEMS & SVC', '008044': 'SYSTECH COMPUTER CORP.', '008006': 'COMPUADD CORPORATION', '00809B': 'JUSTSYSTEM CORPORATION', '0080DF': 'ADC CODENOLL TECHNOLOGY CORP.', '008028': 'TRADPOST (HK) LTD', '008061': 'LITTON SYSTEMS, INC.', '0080F5': 'Quantel Ltd', '0080B9': 'ARCHE TECHNOLIGIES INC.', '004063': 'VIA TECHNOLOGIES, INC.', '00808A': 'SUMMIT MICROSYSTEMS CORP.', '0080A7': 'Honeywell International Inc', '008066': 'ARCOM CONTROL SYSTEMS, LTD.', '0080CB': 'FALCO DATA PRODUCTS', '008007': 'DLOG NC-SYSTEME', '008062': 'INTERFACE CO.', '00801E': 'XINETRON, INC.', '0080E2': 'T.D.I. CO., LTD.', '008049': 'NISSIN ELECTRIC CO., LTD.', '0080C1': 'LANEX CORPORATION', '0080A3': 'Lantronix', '0080BC': 'HITACHI ENGINEERING CO., LTD', '008036': 'REFLEX MANUFACTURING SYSTEMS', '008083': 'AMDAHL', '0080B8': 'DMG MORI B.U.G. CO., LTD.', '00804D': 'CYCLONE MICROSYSTEMS, INC.', '0080D4': 'CHASE RESEARCH LTD.', '00803D': 'SURIGIKEN CO., LTD.', '00808B': 'DACOLL LIMITED', '0080B2': 'NETWORK EQUIPMENT TECHNOLOGIES', '008076': 'MCNC', '00800B': 'CSK CORPORATION', '008018': 'KOBE STEEL, LTD.', '008068': 'YAMATECH SCIENTIFIC LTD.', '0080A8': 'VITACOM CORPORATION', '008033': 'EMS Aviation, Inc.', '00807C': 'FIBERCOM, INC.', '008091': 'TOKYO ELECTRIC CO.,LTD', '00008E': 'SOLBOURNE COMPUTER, INC.', '0000DC': 'HAYES MICROCOMPUTER PRODUCTS', '000063': 'BARCO CONTROL ROOMS GMBH', '00004E': 'AMPEX CORPORATION', '0000BD': 'Mitsubishi Cable Industries, Ltd. / Ryosei Systems', '00002E': 'SOCIETE EVIRA', '00003F': 'SYNTREX, INC.', '00809D': 'Commscraft Ltd.', '0080F4': 'TELEMECANIQUE ELECTRIQUE', '008022': 'SCAN-OPTICS', '0000CD': 'Allied Telesis Labs Ltd', '0080DD': 'GMX INC/GIMIX', '0080FB': 'BVM LIMITED', '0080B4': 'SOPHIA SYSTEMS', '00807F': 'DY-4 INCORPORATED', '00802D': 'XYLOGICS INC', '000061': 'GATEWAY COMMUNICATIONS', '0000EA': 'UPNOD AB', '000043': 'MICRO TECHNOLOGY', '000017': 'Oracle', '0000B2': 'TELEVIDEO SYSTEMS, INC.', '0000EE': 'NETWORK DESIGNERS, LTD.', '0000E5': 'SIGMEX LTD.', '000089': 'CAYMAN SYSTEMS INC.', '0000FF': 'CAMTEC ELECTRONICS LTD.', '0000B7': 'DOVE COMPUTER CORPORATION', '0000F2': 'SPIDER COMMUNICATIONS', '0000CC': 'DENSAN CO., LTD.', '0000A4': 'ACORN COMPUTERS LIMITED', '0000DB': 'British Telecommunications plc', '0000C1': 'Madge Ltd.', '0000F6': 'APPLIED MICROSYSTEMS CORP.', '000077': 'INTERPHASE CORPORATION', '0000A2': 'Bay Networks', '0000EC': 'MICROPROCESS', '0000C2': 'INFORMATION PRESENTATION TECH.', '0000FC': 'MEIKO', '00006D': 'CRAY COMMUNICATIONS, LTD.', '0000DA': 'ATEX', '0000DD': 'TCL INCORPORATED', '0000AE': 'DASSAULT ELECTRONIQUE', '0000A0': 'SANYO Electric Co., Ltd.', '0000C0': 'WESTERN DIGITAL CORPORATION', '000033': 'EGAN MACHINERY COMPANY', '00009D': 'LOCUS COMPUTING CORPORATION', '0000FD': 'HIGH LEVEL HARDWARE', '000065': 'Network General Corporation', '000011': 'NORMEREL SYSTEMES', '000010': 'SYTEK INC.', '0000BC': 'Rockwell Automation', '08007E': 'AMALGAMATED WIRELESS(AUS) LTD', '08007F': 'CARNEGIE-MELLON UNIVERSITY', '000099': 'MTX, INC.', '0000C4': 'WATERS DIV. OF MILLIPORE', '0000EB': 'MATSUSHITA COMM. IND. CO. 
LTD.', '000028': 'PRODIGY SYSTEMS CORPORATION', '08003B': 'TORUS SYSTEMS LIMITED', '08003C': 'SCHLUMBERGER WELL SERVICES', '080034': 'FILENET CORPORATION', '080036': 'INTERGRAPH CORPORATION', '080033': 'BAUSCH & LOMB', '080048': 'EUROTHERM GAUGING SYSTEMS', '080043': 'PIXEL COMPUTER INC.', '080045': 'CONCURRENT COMPUTER CORP.', '080078': 'ACCELL CORPORATION', '08006D': 'WHITECHAPEL COMPUTER WORKS', '080030': 'CERN', '080031': 'LITTLE MACHINES INC.', '08002E': 'METAPHOR COMPUTER SYSTEMS', '080056': 'STANFORD LINEAR ACCEL. CENTER', '08004F': 'CYGNET SYSTEMS', '080050': 'DAISY SYSTEMS CORP.', '08005E': 'COUNTERPOINT COMPUTER INC.', '080076': 'PC LAN TECHNOLOGIES', '080075': 'DANSK DATA ELECTRONIK', '08002B': 'DIGITAL EQUIPMENT CORPORATION', '080029': 'Megatek Corporation', '0270B0': 'M/A-COM INC. COMPANIES', '000053': 'COMPUCORP', '080090': 'SONOMA SYSTEMS', '08000A': 'NESTAR SYSTEMS INCORPORATED', '00800F': 'STANDARD MICROSYSTEMS', '00406B': 'SYSGEN', '08000F': 'MITEL CORPORATION', '080023': 'Panasonic Communications Co., Ltd.', 'B04FC3': 'Shenzhen NVC Cloud Technology Co., Ltd.', '08001C': 'KDD-KOKUSAI DEBNSIN DENWA CO.', '00DD0C': 'UNGERMANN-BASS INC.', '080018': 'PIRELLI FOCOM NETWORKS', '0000A6': 'NETWORK GENERAL CORPORATION', '00BBF0': 'UNGERMANN-BASS INC.', '00408E': 'Tattile SRL ', '000004': 'XEROX CORPORATION', '00DD0E': 'UNGERMANN-BASS INC.', '88571D': 'Seongji Industry Company', '7CF31B': 'LG Electronics (Mobile Communications)', '0001C8': 'CONRAD CORP.', 'CCEF03': 'Hunan Keyshare Communication Technology Co., Ltd.', '102FA3': 'Shenzhen Uvision-tech Technology Co.Ltd', '7048F7': 'Nintendo Co.,Ltd', '18E1CA': 'wanze', 'ECBEDD': 'Sagemcom Broadband SAS', '00C0B6': 'HVE, Inc. ', '309176': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '78C881': 'Sony Interactive Entertainment Inc.', 'D44F68': 'Eidetic Communications Inc', '749EA5': 'OHSUNG', '340F66': 'MicroArx Corporation', '8CC84B': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '0C2FB0': 'Samsung Electronics Co.,Ltd', 'B40216': 'Cisco Systems, Inc', '54A493': 'IEEE Registration Authority', '6C1C71': 'Zhejiang Dahua Technology Co., Ltd.', 'CC6A10': 'The Chamberlain Group, Inc', 'F03F95': 'HUAWEI TECHNOLOGIES CO.,LTD', '185644': 'HUAWEI TECHNOLOGIES CO.,LTD', '9C69D1': 'HUAWEI TECHNOLOGIES CO.,LTD', '185A58': 'Dell Inc.', 'C43A35': 'FN-LINK TECHNOLOGY LIMITED', '04D16E': 'IEEE Registration Authority', '040E3C': 'HP Inc.', 'C4E0DE': 'Zhengzhou XindaJiean Information Technology Co.,Ltd.', '901A4F': 'EM Microelectronic', 'C84F0E': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '6CD2BA': 'zte corporation', '303ABA': 'Guangzhou BaoLun Electronics Co., Ltd', 'D88ADC': 'Huawei Device Co., Ltd.', '10E953': 'Huawei Device Co., Ltd.', '7C48B2': 'Vida Resources Lte Ltd', '2CAB33': 'Texas Instruments', 'B887C6': 'Prudential Technology co.,LTD', 'EC9C32': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', '4CADA8': 'PANOPTICS CORP.', 'FC1CA1': 'Nokia', 'D42DC5': 'Panasonic i-PRO Sensing Solutions Co., Ltd.', 'E8D03C': 'Shenzhen Jingxun Software Telecommunication Technology Co.,Ltd', '1C1ADF': 'Microsoft Corporation', 'D4F547': 'Google, Inc.', '981BB5': 'ASSA ABLOY Korea Co., Ltd iRevo', '34CB1A': 'Procter & Gamble Company', 'F0B107': 'Ericsson AB', '784F9B': 'Juniper Networks', '9CFFC2': 'AVI Systems GmbH', '44D878': 'Hui Zhou Gaoshengda Technology Co.,LTD', 'A0D807': 'Huawei Device Co., Ltd.', '2C780E': 'Huawei Device Co., Ltd.', '34B20A': 'Huawei Device Co., Ltd.', '98F4AB': 'Espressif Inc.', 'D8BFC0': 'Espressif Inc.', '202681': 'TECNO MOBILE LIMITED', 'F4D620': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '64FB92': 'PPC Broadband Inc.', '141346': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '949034': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', '987A10': 'Ericsson AB', '542BDE': 'New H3C Technologies Co., Ltd', '98F781': 'ARRIS Group, Inc.', '7897C3': 'DINGXIN INFORMATION TECHNOLOGY CO.,LTD', '4C90DB': 'JL Audio', 'B899AE': 'Shenzhen MiaoMing Intelligent Technology Co.,Ltd', 'E8D0B9': 'Taicang T&W Electronics', '3C8F06': 'Shenzhen Libtor Technology Co.,Ltd', 'B00875': 'HUAWEI TECHNOLOGIES CO.,LTD', '8CF112': 'Motorola Mobility LLC, a Lenovo Company', '847637': 'HUAWEI TECHNOLOGIES CO.,LTD', 'FC9435': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E02481': 'HUAWEI TECHNOLOGIES CO.,LTD', '44AEAB': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'A4F05E': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '5885A2': 'Realme Chongqing MobileTelecommunications Corp Ltd', 'A8C0EA': 'Pepwave Limited', '182AD3': 'Juniper Networks', '80B07B': 'zte corporation', 'C85A9F': 'zte corporation', '1C687E': 'Shenzhen Qihu Intelligent Technology Company Limited', 'C03656': 'Fiberhome Telecommunication Technologies Co.,LTD', '2CF89B': 'Cisco Systems, Inc', '00071C': 'AT&T', 'E0E8E6': 'Shenzhen C-Data Technology Co., Ltd.', '500291': 'Espressif Inc.', '001DDF': 'Sunitec Enterprise Co.,Ltd', '8C0FFA': 'Hutec co.,ltd', 'ACFE05': 'ITEL MOBILE LIMITED', 'BCC31B': 'Kygo Life A', '782A79': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '786559': 'Sagemcom Broadband SAS', '50D2F5': 'Beijing Xiaomi Mobile Software Co., Ltd', '24526A': 'Zhejiang Dahua Technology Co., Ltd.', '20DFB9': 'Google, Inc.', '5CCAD3': 'CHIPSEA TECHNOLOGIES (SHENZHEN) CORP.', '28167F': 'Xiaomi Communications Co Ltd', '087190': 'Intel Corporate', 'B03E51': 'BSkyB Ltd', '5CE883': 'HUAWEI TECHNOLOGIES CO.,LTD', '100177': 'HUAWEI TECHNOLOGIES CO.,LTD', '44A191': 'HUAWEI TECHNOLOGIES CO.,LTD', '6023A4': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', 'A4530E': 'Cisco Systems, Inc', '00403A': 'IMPACT TECHNOLOGIES', '9C28BF': 'Continental Automotive Czech Republic s.r.o.', '807215': 'BSkyB Ltd', '74D637': 'Amazon Technologies Inc.', 'D05F64': 'IEEE Registration Authority', '7484E1': 'Dongguan Haoyuan Electronics Co.,Ltd', '64DF10': 'JingLue Semiconductor(SH) Ltd.', 'C463FB': 'Neatframe AS', 'C8B1CD': 'Apple, Inc.', '1460CB': 'Apple, Inc.', 'B8F12A': 'Apple, Inc.', 'F887F1': 'Apple, Inc.', '0C8126': 'Juniper Networks', '305714': 'Apple, Inc.', '60447A': 'Water-i.d. 
GmbH', '04A222': 'Arcadyan Corporation', '04AB6A': 'Chun-il Co.,Ltd.', '544E45': 'Private', '04C807': 'Xiaomi Communications Co Ltd', '28FE65': 'DongGuan Siyoto Electronics Co., Ltd ', '1806F5': 'RAD Data Communications, Ltd.', '489BD5': 'Extreme Networks, Inc.', '3C8C93': 'Juniper Networks', 'E454E8': 'Dell Inc.', '683F1E': 'EFFECT Photonics B.V.', '44FB5A': 'zte corporation', '4459E3': 'HUAWEI TECHNOLOGIES CO.,LTD', '4074E0': 'Intel Corporate', '20968A': 'China Mobile (Hangzhou) Information Technology Co., Ltd.', '8C1850': 'China Mobile (Hangzhou) Information Technology Co., Ltd.', 'D8D4E6': 'Hytec Inter Co., Ltd.', '840B7C': 'Hitron Technologies. Inc', 'C85D38': 'HUMAX Co., Ltd.', 'DC54D7': 'Amazon Technologies Inc.', '44D3CA': 'Cisco Systems, Inc', '0035FF': 'Texas Instruments', '889FAA': 'Hella Gutmann Solutions GmbH ', '48A73C': 'Sichuan tianyi kanghe communications co., LTD', 'F8E7A0': 'vivo Mobile Communication Co., Ltd.', '2CFFEE': 'vivo Mobile Communication Co., Ltd.', '1449BC': 'DrayTek Corp.', '20F478': 'Xiaomi Communications Co Ltd', '8C04BA': 'Dell Inc.', '6C2990': 'WiZ Connected Lighting Company Limited', '9835ED': 'HUAWEI TECHNOLOGIES CO.,LTD', '04885F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F8A763': 'Zhejiang Tmall Technology Co., Ltd.', 'A49813': 'ARRIS Group, Inc.', '084F0A': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A8494D': 'HUAWEI TECHNOLOGIES CO.,LTD', '44004D': 'HUAWEI TECHNOLOGIES CO.,LTD', '18CF24': 'HUAWEI TECHNOLOGIES CO.,LTD', '50F8A5': 'eWBM Co., Ltd.', '807693': 'Newag SA', 'BC9740': 'IEEE Registration Authority', 'C850CE': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D89B3B': 'HUAWEI TECHNOLOGIES CO.,LTD', '88403B': 'HUAWEI TECHNOLOGIES CO.,LTD', 'FC8743': 'HUAWEI TECHNOLOGIES CO.,LTD', '90735A': 'Motorola Mobility LLC, a Lenovo Company', '1C8259': 'IEEE Registration Authority', '000BE4': 'Hosiden Corporation', '0004DF': 'TERACOM TELEMATICA S.A', '4413D0': 'zte corporation', '2462AB': 'Espressif Inc.', '88B436': 'Private', '7438B7': 'CANON INC.', '00FA21': 'Samsung Electronics Co.,Ltd', '7C2302': 'Samsung Electronics Co.,Ltd', '18B6F7': 'NEW POS TECHNOLOGY LIMITED', '5CB15F': 'Oceanblue Cloud Technology Limited', '18AACA': 'Sichuan tianyi kanghe communications co., LTD', 'D49DC0': 'Samsung Electronics Co.,Ltd', 'D0196A': 'Ciena Corporation', '84FDD1': 'Intel Corporate', '6CAB05': 'Cisco Systems, Inc', 'B0700D': 'Nokia', '60D248': 'ARRIS Group, Inc.', '501395': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', '18D9EF': 'Shuttle Inc.', '88DA33': 'Beijing Xiaoyuer Network Technology Co., Ltd', '84C78F': 'STORDIS GmbH', '80DABC': 'Megafone Limited', '002175': 'Pacific Satellite International Ltd.', '184644': 'Home Control Singapore Pte Ltd', 'C09FE1': 'zte corporation', '485DEB': 'Just Add Power', '5041B9': 'I-O DATA DEVICE,INC.', '346B5B': 'New H3C Technologies Co., Ltd', '84E892': 'Actiontec Electronics, Inc', '8C0FA0': 'di-soric GmbH & Co. 
KG', 'DCB808': 'Extreme Networks, Inc.', '78E2BD': 'Vodafone Automotive S.p.A.', 'F848FD': 'China Mobile Group Device Co.,Ltd.', 'C821DA': 'Shenzhen YOUHUA Technology Co., Ltd', 'E0B655': 'Beijing Xiaomi Electronics Co., Ltd.', '20DA22': 'HUAWEI TECHNOLOGIES CO.,LTD', '1CDE57': 'Fiberhome Telecommunication Technologies Co.,LTD', 'E0DCFF': 'Xiaomi Communications Co Ltd', '608CDF': 'Private', '00778D': 'Cisco Systems, Inc', '000E8C': 'Siemens AG', '008764': 'Cisco Systems, Inc', '20658E': 'HUAWEI TECHNOLOGIES CO.,LTD', '183D5E': 'HUAWEI TECHNOLOGIES CO.,LTD', '889746': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', '846FCE': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'C468D0': 'VTech Telecommunications Ltd.', '14AEDB': 'VTech Telecommunications Ltd.', '78DB2F': 'Texas Instruments', 'B0E71D': 'Shanghai Maigantech Co.,Ltd', 'F8BBBF': 'eero inc.', 'DCFB48': 'Intel Corporate', 'FC3342': 'Juniper Networks', 'DC7137': 'zte corporation', '3441A8': 'ER-Telecom', '34DB9C': 'Sagemcom Broadband SAS', '7440BE': 'LG Innotek', '804A14': 'Apple, Inc.', '703C69': 'Apple, Inc.', '847C9B': 'GD Midea Air-Conditioning Equipment Co.,Ltd.', 'A8E2C3': 'Shenzhen YOUHUA Technology Co., Ltd', '0CA06C': 'Industrial Cyber Sensing Inc.', 'FCD2B6': 'IEEE Registration Authority', '9020C2': 'Aruba, a Hewlett Packard Enterprise Company', '04D4C4': 'ASUSTek COMPUTER INC.', 'FCB662': 'IC Holdings LLC', '48049F': 'ELECOM CO., LTD', '087F98': 'vivo Mobile Communication Co., Ltd.', 'AC2DA9': 'TECNO MOBILE LIMITED', '7488BB': 'Cisco Systems, Inc', 'A4CF12': 'Espressif Inc.', 'C85261': 'ARRIS Group, Inc.', 'C04121': 'Nokia', '70BF92': 'GN Audio A/S', '4C6AF6': 'HMD Global Oy', '489DD1': 'Samsung Electronics Co.,Ltd', 'B06FE0': 'Samsung Electronics Co.,Ltd', 'A0510B': 'Intel Corporate', 'F0D4E2': 'Dell Inc.', '40A93F': 'Pivotal Commware, Inc.', '44B994': 'Douglas Lighting Controls', 'C08C71': 'Motorola Mobility LLC, a Lenovo Company', 'F46F4E': 'Echowell', '2C3F0B': 'Cisco Meraki', '5C8816': 'Rockwell Automation', '002F5C': 'Cisco Systems, Inc', 'F4645D': 'Toshiba', '00CB51': 'Sagemcom Broadband SAS', 'C464B7': 'Fiberhome Telecommunication Technologies Co.,LTD', '2479F8': 'KUPSON spol. 
s r.o.', '38F85E': 'HUMAX Co., Ltd.', 'ACBB61': 'YSTen Technology Co.,Ltd', '180D2C': 'Intelbras', '08ECF5': 'Cisco Systems, Inc', 'D07650': 'IEEE Registration Authority', '60D0A9': 'Samsung Electronics Co.,Ltd', '88CEFA': 'HUAWEI TECHNOLOGIES CO.,LTD', '002706': 'YOISYS', '042DB4': 'First Property (Beijing) Co., Ltd Modern MOMA Branch', '342003': 'Shenzhen Feitengyun Technology Co.,LTD', '7CFD82': 'GUANGDONG GENIUS TECHNOLOGY CO., LTD.', 'EC4118': 'XIAOMI Electronics,CO.,LTD', 'D8860B': 'IEEE Registration Authority', '04E0B0': 'Shenzhen YOUHUA Technology Co., Ltd', 'F07D68': 'D-Link Corporation', '98DAC4': 'TP-LINK TECHNOLOGIES CO.,LTD.', '40E3D6': 'Aruba, a Hewlett Packard Enterprise Company', 'B45D50': 'Aruba, a Hewlett Packard Enterprise Company', 'ACA31E': 'Aruba, a Hewlett Packard Enterprise Company', '38B19E': 'IEEE Registration Authority', '38E26E': 'ShenZhen Sweet Rain Electronics Co.,Ltd.', '70C9C6': 'Cisco Systems, Inc', '689A87': 'Amazon Technologies Inc.', '64AE88': 'Polytec GmbH', '00D050': 'Iskratel d.o.o.', '78DAA2': 'Cynosure Technologies Co.,Ltd', '00177B': 'Azalea Networks inc', '8084A9': 'oshkosh Corporation', 'D4B92F': 'Technicolor CH USA Inc.', '502B98': 'Es-tech International', 'C82832': 'Beijing Xiaomi Electronics Co., Ltd.', '946A77': 'Technicolor CH USA Inc.', 'F84DFC': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '001BB5': 'Cherry GmbH', '701BFB': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '04766E': 'ALPS ELECTRIC CO., LTD.', 'AC7A4D': 'ALPS ELECTRIC CO., LTD.', '38C096': 'ALPS ELECTRIC CO., LTD.', 'C4346B': 'Hewlett Packard', '48F17F': 'Intel Corporate', '002643': 'ALPS ELECTRIC CO., LTD.', 'F46E95': 'Extreme Networks, Inc.', '004E35': 'Hewlett Packard Enterprise', '4CC7D6': 'FLEXTRONICS MANUFACTURING(ZHUHAI)CO.,LTD.', 'C80873': 'Ruckus Wireless', 'BC3E07': 'Hitron Technologies. Inc', '48FDA3': 'Xiaomi Communications Co Ltd', '288088': 'NETGEAR', '1C3477': 'Innovation Wireless', '64CE6E': 'Sierra Wireless', '001697': 'NEC Corporation', '003013': 'NEC Corporation', '049DFE': 'Hivesystem', 'D05157': 'LEAX Arkivator Telecom', '0CEC84': 'Shenzhen TINNO Mobile Technology Corp.', '9CDB07': 'Thum+Mahr GmbH', 'FC94CE': 'zte corporation', '90869B': 'zte corporation', 'E0189F': 'EM Microelectronic', 'F81308': 'Nokia', '9458CB': 'Nintendo Co.,Ltd', '84DB2F': 'Sierra Wireless', 'DCEB69': 'Technicolor CH USA Inc.', '28EC9A': 'Texas Instruments', 'C4E506': 'Piper Networks, Inc.', 'F8A2D6': 'Liteon Technology Corporation', '74366D': 'Vodafone Italia S.p.A.', 'F80F6F': 'Cisco Systems, Inc', 'FCBE7B': 'vivo Mobile Communication Co., Ltd.', 'B40FB3': 'vivo Mobile Communication Co., Ltd.', 'EC5C68': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '182A44': 'HIROSE ELECTRONIC SYSTEM', '30EB5A': 'LANDIS + GYR', 'CCD3C1': 'Vestel Elektronik San ve Tic. 
A.Ş.', 'D8F2CA': 'Intel Corporate', 'B4C62E': 'Molex CMS', '0CD0F8': 'Cisco Systems, Inc', '282536': 'SHENZHEN HOLATEK CO.,LTD', 'B8A175': 'Roku, Inc.', 'B8259A': 'Thalmic Labs ', '0080E3': 'CORAL NETWORK CORPORATION', 'DCDA80': 'New H3C Technologies Co., Ltd', '6CA928': 'HMD Global Oy', '00D861': 'Micro-Star INTL CO., LTD.', '74C17D': 'Infinix mobility limited', 'F85B9C': 'SB SYSTEMS Co.,Ltd', '8871B1': 'ARRIS Group, Inc.', 'F0AF85': 'ARRIS Group, Inc.', 'FCAE34': 'ARRIS Group, Inc.', '745F90': 'LAM Technologies', 'A42655': 'LTI Motion (Shanghai) Co., Ltd.', '60A730': 'Shenzhen Yipinfang Internet Technology Co.,Ltd', '3C9BD6': 'Vizio, Inc', '50DB3F': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '1081B4': 'Hunan Greatwall Galaxy Science and Technology Co.,Ltd.', 'D43260': 'GoPro', 'F4DD9E': 'GoPro', 'D4D919': 'GoPro', '141114': 'TECNO MOBILE LIMITED', '00B8B3': 'Cisco Systems, Inc', '64EEB7': 'Netcore Technology Inc', 'CC7286': "Xi'an Fengyu Information Technology Co., Ltd.", '004279': 'Sunitec Enterprise Co.,Ltd', 'A45046': 'Xiaomi Communications Co Ltd', '1C24CD': 'Askey Computer Corp.', '3881D7': 'Texas Instruments', '1804ED': 'Texas Instruments', 'B47748': 'Shenzhen Neoway Technology Co.,Ltd.', 'F8501C': 'Tianjin Geneuo Technology Co.,Ltd', '007C2D': 'Samsung Electronics Co.,Ltd', '44070B': 'Google, Inc.', 'ECF6BD': 'SNCF MOBILITÉS', 'B831B5': 'Microsoft Corporation', '00D6FE': 'Cisco Systems, Inc', '0CBF74': 'Morse Micro', 'B41D2B': 'Shenzhen YOUHUA Technology Co., Ltd', '70D313': 'HUAWEI TECHNOLOGIES CO.,LTD', '9C1D36': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CCBBFE': 'HUAWEI TECHNOLOGIES CO.,LTD', 'FC8F7D': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '24BE18': 'DADOUTEK COMPANY LIMITED', '749D79': 'Sercomm Corporation.', 'F8C249': 'Private', '14C213': 'Apple, Inc.', 'A4D931': 'Apple, Inc.', 'BCFED9': 'Apple, Inc.', '808223': 'Apple, Inc.', '38B4D3': 'BSH Hausgeraete GmbH', 'E89363': 'Nokia', '7C0CF6': 'Guangdong Huiwei High-tech Co., Ltd.', '20AD56': 'Continental Automotive Systems Inc.', '5029F5': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '300A60': 'IEEE Registration Authority', 'CC08FB': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'BCAF91': 'TE Connectivity Sensor Solutions', 'F0D7DC': 'Wesine (Wuhan) Technology Co., Ltd.', '80D065': 'CKS Corporation', 'BCF310': 'Aerohive Networks Inc.', '007204': 'Samsung Electronics Co., Ltd. 
ARTIK', '40C81F': 'Shenzhen Xinguodu Technology Co., Ltd.', 'C84782': 'Areson Technology Corp.', '1459C0': 'NETGEAR', '283166': 'vivo Mobile Communication Co., Ltd.', 'C04004': 'Medicaroid Corporation', 'A4ED43': 'IEEE Registration Authority', '0C7C28': 'Nokia', '6843D7': 'Agilecom Photonics Solutions Guangdong Limited', '20D80B': 'Juniper Networks', '94298D': 'Shanghai AdaptComm Technology Co., Ltd.', '00AA6E': 'Cisco Systems, Inc', '8C61A3': 'ARRIS Group, Inc.', 'B86A97': 'Edgecore Networks Corporation', '00040B': '3COM EUROPE LTD', '000A5E': '3COM', '00105A': '3COM', '8C8F8B': 'China Mobile Chongqing branch', '006097': '3COM', '006008': '3COM', '000102': '3COM', 'A81087': 'Texas Instruments', 'C8C2F5': 'FLEXTRONICS MANUFACTURING(ZHUHAI)CO.,LTD.', 'F05849': 'CareView Communications', '34E5EC': 'Palo Alto Networks', '8CFE74': 'Ruckus Wireless', '342912': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E43493': 'HUAWEI TECHNOLOGIES CO.,LTD', '604BAA': 'Magic Leap, Inc.', '4C0143': 'eero inc.', 'A02833': 'IEEE Registration Authority', '001B6E': 'Keysight Technologies, Inc.', '582F40': 'Nintendo Co.,Ltd', '0890BA': 'Danlaw Inc', '4C364E': 'Panasonic Corporation Connected Solutions Company', 'BCA58B': 'Samsung Electronics Co.,Ltd', '94A3CA': 'KonnectONE, LLC', 'D0D3FC': 'Mios, Ltd.', '6C6CD3': 'Cisco Systems, Inc', 'E049ED': 'Audeze LLC', '143719': 'PT Prakarsa Visi Valutama', '48A472': 'Intel Corporate', 'E85D86': 'CHANG YOW TECHNOLOGIES INTERNATIONAL CO.,LTD.', '0440A9': 'New H3C Technologies Co., Ltd', '8030E0': 'Hewlett Packard Enterprise', 'A8016D': 'Aiwa Corporation', '80CEB9': 'Samsung Electronics Co.,Ltd', '14D169': 'HUAWEI TECHNOLOGIES CO.,LTD', '000157': 'SYSWAVE CO., LTD', '0020B5': 'YASKAWA ELECTRIC CORPORATION', 'B8BEF4': 'devolo AG', '244CE3': 'Amazon Technologies Inc.', '286336': 'Siemens AG', 'E06267': 'Xiaomi Communications Co Ltd', '70B7AA': 'vivo Mobile Communication Co., Ltd.', '58FDBE': 'Shenzhen Taikaida Technology Co., Ltd', 'F4F197': 'EMTAKE Inc', '6CED51': 'NEXCONTROL Co.,Ltd', '1062E5': 'Hewlett Packard', '04C3E6': 'IEEE Registration Authority', '002622': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '84B31B': 'Kinexon GmbH', 'F8272E': 'Mercku', '98BB99': 'Phicomm (Sichuan) Co.,Ltd.', '2866E3': 'AzureWave Technology Inc.', '848A8D': 'Cisco Systems, Inc', '1CC3EB': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'F460E2': 'Xiaomi Communications Co Ltd', 'E4D124': ' Mojo Networks, Inc.', '0013A3': 'Siemens Home & Office Comm. Devices', '082525': 'Xiaomi Communications Co Ltd', '9CC950': 'Baumer Holding', '60F18A': 'HUAWEI TECHNOLOGIES CO.,LTD', '504C7E': 'THE 41ST INSTITUTE OF CETC', 'B0B867': 'Hewlett Packard Enterprise', 'C00380': 'Juniper Networks', 'C49500': 'Amazon Technologies Inc.', '68DD26': 'Shanghai Focus Vision Security Technology Co.,Ltd', 'F89910': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '50E0EF': 'Nokia', 'CC50E3': 'Espressif Inc.', '001413': 'Trebing & Himstedt Prozeßautomation GmbH & Co. KG', '000799': 'Tipping Point Technologies, Inc.', 'D01CBB': 'Beijing Ctimes Digital Technology Co., Ltd.', 'C0B6F9': 'Intel Corporate', 'D8B6B7': 'Comtrend Corporation', '8C14B4': 'zte corporation', '7487BB': 'Ciena Corporation', 'EC3873': 'Juniper Networks', '3C9872': 'Sercomm Corporation.', '40C3C6': 'SnapRoute', '000EEE': 'Muco Industrie BV', '7C1C4E': 'LG Innotek', '144F8A': 'Intel Corporate', '002106': 'RIM Testing Services', '00409D': 'DigiBoard', '2C4759': 'Beijing MEGA preponderance Science & Technology Co. 
Ltd', '00138A': 'Qingdao GoerTek Technology Co., Ltd.', 'A830AD': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', 'A41566': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '1C965A': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '401B5F': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', 'DCE0EB': 'Nanjing Aozheng Information Technology Co.Ltd', 'BC5FF6': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', 'A83E0E': 'HMD Global Oy', '10C172': 'HUAWEI TECHNOLOGIES CO.,LTD', 'DCE305': 'ZAO NPK Rotek', 'A4DA32': 'Texas Instruments', '780473': 'Texas Instruments', '00151E': 'ETHERNET Powerlink Standarization Group (EPSG)', '00111E': 'ETHERNET Powerlink Standarization Group (EPSG)', 'C8E7D8': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', 'AC3B77': 'Sagemcom Broadband SAS', '60D21C': 'Sunnovo International Limited', 'CC51B4': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '00C3F4': 'Samsung Electronics Co.,Ltd', '78725D': 'Cisco Systems, Inc', 'B48655': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D0D783': 'HUAWEI TECHNOLOGIES CO.,LTD', '5CCD7C': 'MEIZU Technology Co.,Ltd.', 'EC8C9A': 'HUAWEI TECHNOLOGIES CO.,LTD', '149346': 'PNI sensor corporation', '5C9656': 'AzureWave Technology Inc.', 'E06066': 'Sercomm Corporation.', 'B88AEC': 'Nintendo Co.,Ltd', 'A8E552': 'JUWEL Aquarium AG & Co. KG', '8CCF5C': 'BEFEGA GmbH', 'B46BFC': 'Intel Corporate', 'B0FC0D': 'Amazon Technologies Inc.', 'CCC92C': 'Schindler - PORT Technology', '001E39': 'Comsys Communication Ltd.', '048AE1': 'FLEXTRONICS MANUFACTURING(ZHUHAI)CO.,LTD.', '7836CC': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'F0B5D1': 'Texas Instruments', '00E000': 'FUJITSU LIMITED', '90848B': 'HDR10+ Technologies, LLC', 'FCE66A': 'Industrial Software Co', '70C833': 'Wirepas Oy', '0C73EB': 'IEEE Registration Authority', '0CF5A4': 'Cisco Systems, Inc', '80C7C5': 'Fiberhome Telecommunication Technologies Co.,LTD', '2816A8': 'Microsoft Corporation', 'F8F532': 'ARRIS Group, Inc.', 'B083D6': 'ARRIS Group, Inc.', 'B0416F': 'Shenzhen Maxtang Computer Co.,Ltd', '10B36F': 'Bowei Technology Company Limited ', 'FC9BC6': 'Sumavision Technologies Co.,Ltd', 'C8292A': 'Barun Electronics', '0080BA': 'SPECIALIX (ASIA) PTE, LTD', '480BB2': 'IEEE Registration Authority', '501D93': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CCC079': 'Murata Manufacturing Co., Ltd.', 'F09CD7': 'Guangzhou Blue Cheetah Intelligent Technology Co., Ltd.', 'BCE143': 'Apple, Inc.', 'E482CC': 'Jumptronic GmbH', 'C8D779': 'QING DAO HAIER TELECOM CO.,LTD.', 'E4434B': 'Dell Inc.', '48605F': 'LG Electronics (Mobile Communications)', '30D9D9': 'Apple, Inc.', '6030D4': 'Apple, Inc.', 'F895EA': 'Apple, Inc.', '18F1D8': 'Apple, Inc.', '647033': 'Apple, Inc.', '846878': 'Apple, Inc.', 'C8D083': 'Apple, Inc.', 'BCAB7C': 'TRnP KOREA Co Ltd', '9C2EA1': 'Xiaomi Communications Co Ltd', '089734': 'Hewlett Packard Enterprise', '7C7668': 'HUAWEI TECHNOLOGIES CO.,LTD', '6C3838': 'Marking System Technology Co., Ltd.', 'E019D8': 'BH TECHNOLOGIES', '0C6ABC': 'Fiberhome Telecommunication Technologies Co.,LTD', '3CCD5D': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A438CC': 'Nintendo Co.,Ltd', '74721E': 'Edison Labs Inc.', '780F77': 'HangZhou Gubei Electronics Technology Co.,Ltd', '001386': 'ABB Inc/Totalflow', '003C10': 'Cisco Systems, Inc', 'F041C8': 'IEEE Registration Authority', 'CC9916': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', 'EC7FC6': 'ECCEL CORPORATION SAS', '4CABFC': 'zte corporation', '7C2A31': 'Intel Corporate', '8C4CDC': 'PLANEX COMMUNICATIONS INC.', '5065F3': 'Hewlett Packard', '3C9509': 'Liteon Technology Corporation', '64CB5D': 'SIA TeleSet', '30FD38': 'Google, Inc.', '0CF346': 'Xiaomi Communications Co Ltd', '5821E9': 'TWPI', 'F0E3DC': 'Tecon MT, LLC', 'A8DA01': 'Shenzhen NUOLIJIA Digital Technology Co.,Ltd', '7C2586': 'Juniper Networks', '88E90F': 'innomdlelab', '703A73': 'Shenzhen Sundray Technologies Company Limited', '10F9EB': 'Industria Fueguina de Relojería Electrónica s.a.', '80AD16': 'Xiaomi Communications Co Ltd', '044EAF': 'LG Innotek', '1894C6': 'ShenZhen Chenyee Technology Co., Ltd.', '002194': 'Ping Communication', '5C5AEA': 'FORD', '000B7B': 'Test-Um Inc.', '54A65C': 'Technicolor CH USA Inc.', 'BCDDC2': 'Espressif Inc.', '0010D8': 'CALISTA', '7CFF4D': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '7470FD': 'Intel Corporate', 'C88F26': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '98D863': 'Shanghai High-Flying Electronics Technology Co., Ltd', 'C49F4C': 'HUAWEI TECHNOLOGIES CO.,LTD', '0C704A': 'HUAWEI TECHNOLOGIES CO.,LTD', '0C41E9': 'HUAWEI TECHNOLOGIES CO.,LTD', '3CE824': 'HUAWEI TECHNOLOGIES CO.,LTD', '54B7E5': 'Rayson Technology Co., Ltd.', '946372': 'vivo Mobile Communication Co., Ltd.', 'BC0FA7': 'Ouster', '70C94E': 'Liteon Technology Corporation', '70D081': 'Beijing Netpower Technologies Inc.', '003074': 'EQUIINET LTD.', 'EC9B8B': 'Hewlett Packard Enterprise', '40BD32': 'Texas Instruments', 'CC8E71': 'Cisco Systems, Inc', '38F554': 'HISENSE ELECTRIC CO.,LTD', '18A28A': 'Essel-T Co., Ltd', '20365B': 'Megafone Limited', 'E8DE00': 'ChongQing GuanFang Technology Co.,LTD', 'FC643A': 'Samsung Electronics Co.,Ltd', 'A8515B': 'Samsung Electronics Co.,Ltd', 'B4FBF9': 'HUAWEI TECHNOLOGIES CO.,LTD', '506F77': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B0B3AD': 'HUMAX Co., Ltd.', '001936': 'STERLITE OPTICAL TECHNOLOGIES LIMITED', 'F0C9D1': 'GD Midea Air-Conditioning Equipment Co.,Ltd.', 'F8C120': "Xi'an Link-Science Technology Co.,Ltd", '347E5C': 'Sonos, Inc.', '88B6EE': 'Dish Technologies Corp', '345A06': 'SHARP Corporation', 'B89F09': 'Wistron Neweb Corporation', '0402CA': 'Shenzhen Vtsonic Co.,ltd', '3CFB5C': 'Fiberhome Telecommunication Technologies Co.,LTD', '7440BB': 'Hon Hai Precision Ind. 
Co.,Ltd.', 'B4DE31': 'Cisco Systems, Inc', 'A44027': 'zte corporation', 'B4F7A1': 'LG Electronics (Mobile Communications)', '70F220': 'Actiontec Electronics, Inc', '1C1161': 'Ciena Corporation', '88BD45': 'Samsung Electronics Co.,Ltd', '54FCF0': 'Samsung Electronics Co.,Ltd', '306A85': 'Samsung Electronics Co.,Ltd', '4CDD31': 'Samsung Electronics Co.,Ltd', 'C87765': 'Tiesse SpA', 'D0817A': 'Apple, Inc.', '98CA33': 'Apple, Inc.', '28EDE0': 'AMPAK Technology, Inc.', '68AB1E': 'Apple, Inc.', '70EF00': 'Apple, Inc.', 'BCFFEB': 'Motorola Mobility LLC, a Lenovo Company', '2C37C5': 'Qingdao Haier Intelligent Home Appliance Technology Co.,Ltd', 'CC40D0': 'NETGEAR', '7C7630': 'Shenzhen YOUHUA Technology Co., Ltd', '9822EF': 'Liteon Technology Corporation', '7C7635': 'Intel Corporate', 'B80716': 'vivo Mobile Communication Co., Ltd.', '788038': 'FUNAI ELECTRIC CO., LTD.', 'F045DA': 'Texas Instruments', '1CEEC9': 'Elo touch solutions', '000130': 'Extreme Networks, Inc.', 'FC0A81': 'Extreme Networks, Inc.', '4048FD': 'IEEE Registration Authority', '004097': 'DATEX DIVISION OF', '9C4FCF': 'TCT mobile ltd', 'D896E0': 'Alibaba Cloud Computing Ltd.', 'E4F042': 'Google, Inc.', '20B399': 'Enterasys', 'CC2D21': 'Tenda Technology Co.,Ltd.Dongguan branch', 'A8EEC6': 'Muuselabs NV/SA', '207852': 'Nokia', '001862': 'Seagate Technology', '000C50': 'Seagate Technology', 'F417B8': 'AirTies Wireless Networks', '38F73D': 'Amazon Technologies Inc.', 'C0A00D': 'ARRIS Group, Inc.', 'C8DEC9': 'Coriant', '44D5A5': 'AddOn Computer', 'B8F74A': 'RCNTEC', 'ECF451': 'Arcadyan Corporation', '645106': 'Hewlett Packard', '0C1539': 'Apple, Inc.', '1C330E': 'PernixData', '0C6111': 'Anda Technologies SAC', '342AF1': 'Texas Instruments', '581243': 'AcSiP Technology Corp.', '0022C4': 'epro GmbH', '6C5697': 'Amazon Technologies Inc.', '58C17A': 'Cambium Networks Limited', 'E0AADB': 'Nanjing PANENG Technology Development Co.,Ltd', '0005FF': 'SNS Solutions, Inc.', 'F87B20': 'Cisco Systems, Inc', 'D06726': 'Hewlett Packard Enterprise', 'ECFAF4': 'SenRa Tech Pvt. 
Ltd', 'A89FEC': 'ARRIS Group, Inc.', '00BE9E': 'Fiberhome Telecommunication Technologies Co.,LTD', '54C57A': 'Sunnovo International Limited', '38AD8E': 'New H3C Technologies Co., Ltd', '34D0B8': 'IEEE Registration Authority', 'F449EF': 'EMSTONE', '54DF24': 'Fiberhome Telecommunication Technologies Co.,LTD', 'AC1DDF': 'IEEE Registration Authority', 'E8D819': 'AzureWave Technology Inc.', '2C8A72': 'HTC Corporation', '38019F': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '609BC8': 'Hipad Intelligent Technology Co., Ltd.', '406A8E': 'Hangzhou Puwell OE Tech Ltd.', '1C0FAF': 'Lucid Vision Labs', '88B4A6': 'Motorola Mobility LLC, a Lenovo Company', '28CF08': 'ESSYS', 'B06EBF': 'ASUSTek COMPUTER INC.', '603D26': 'Technicolor CH USA Inc.', '245CCB': 'AXIe Consortium, Inc.', '002128': 'Oracle Corporation', '001C73': 'Arista Networks', 'D88F76': 'Apple, Inc.', '409C28': 'Apple, Inc.', '5CA176': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '583879': 'RICOH COMPANY, LTD.', 'F44C70': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', 'C8E7F0': 'Juniper Networks', '182D98': 'Jinwoo Industrial system', '782D7E': 'TRENDnet, Inc.', '741AE0': 'IEEE Registration Authority', 'EC51BC': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'F079E8': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D8A534': 'Spectronix Corporation', '00A096': 'MITSUMI ELECTRIC CO.,LTD.', '78617C': 'MITSUMI ELECTRIC CO.,LTD.', '24B209': 'Avaya Inc', 'FC65DE': 'Amazon Technologies Inc.', 'D0B128': 'Samsung Electronics Co.,Ltd', 'BC5451': 'Samsung Electronics Co.,Ltd', '74860B': 'Cisco Systems, Inc', 'BC903A': 'Robert Bosch GmbH', 'B0935B': 'ARRIS Group, Inc.', '601803': 'Daikin Air-conditioning (Shanghai) Co., Ltd.', '78321B': 'D-Link International', '8CFEB4': 'VSOONTECH ELECTRONICS CO., LIMITED', '08661F': 'Palo Alto Networks', '940E6B': 'HUAWEI TECHNOLOGIES CO.,LTD', '64FB50': 'RoomReady/Zdi, Inc.', '74EAC8': 'New H3C Technologies Co., Ltd', 'B4D64E': 'Caldero Limited', 'F89DBB': 'Tintri', 'D8A01D': 'Espressif Inc.', '38E2DD': 'zte corporation', '885DFB': 'zte corporation', '9C65EE': 'DASAN Network Solutions', '78CA04': 'Nokia Corporation', 'DC0C2D': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', 'D067E5': 'Dell Inc.', '5CE8B7': 'Oraimo Technology Limited', 'CC66B2': 'Nokia', 'C4F312': 'Texas Instruments', '904E91': 'IEEE Registration Authority', '3C11B2': 'Fraunhofer FIT', '104B46': 'Mitsubishi Electric Corporation', '0017C8': 'KYOCERA Display Corporation', '68ECC5': 'Intel Corporate', '34298F': 'IEEE Registration Authority', '5CEA1D': 'Hon Hai Precision Ind. Co.,Ltd.', '181456': 'Nokia Corporation', '58B42D': 'YSTen Technology Co.,Ltd', 'E048D3': 'MOBIWIRE MOBILES (NINGBO) CO.,LTD', 'B009DA': 'Ring Solutions', '00054F': 'Garmin International', '58E28F': 'Apple, Inc.', '787B8A': 'Apple, Inc.', '4C16FC': 'Juniper Networks', '48BCA6': '\u200bASUNG TECHNO CO.,Ltd', '005C86': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '288CB8': 'zte corporation', '30053F': 'JTI Co.,Ltd.', 'B8DB1C': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '3C10E6': 'PHAZR Inc.', '904506': 'Tokyo Boeki Medisys Inc.', '0021A1': 'Cisco Systems, Inc', 'FCB698': 'Cambridge Industries(Group) Co.,Ltd.', '0001CD': 'ARtem', '5C546D': 'HUAWEI TECHNOLOGIES CO.,LTD', '78BC1A': 'Cisco Systems, Inc', '000E59': 'Sagemcom Broadband SAS', '101D51': '8Mesh Networks Limited', 'DCEB53': 'Wuhan QianXiao Elecronic Technology CO.,LTD', '0CB937': 'Ubee Interactive Co., Limited', 'EC8AC7': 'Fiberhome Telecommunication Technologies Co.,LTD', '88365F': 'LG Electronics (Mobile Communications)', '84A1D1': 'Sagemcom Broadband SAS', '909D7D': 'ARRIS Group, Inc.', '788C4D': 'Indyme Solutions, LLC', 'FC7F56': 'CoSyst Control Systems GmbH', '2C4053': 'Samsung Electronics Co.,Ltd', '0C8FFF': 'HUAWEI TECHNOLOGIES CO.,LTD', '54B121': 'HUAWEI TECHNOLOGIES CO.,LTD', '2880A2': 'Novatel Wireless Solutions, Inc.', '24B2DE': 'Espressif Inc.', '788102': 'Sercomm Corporation.', '84AA9C': 'MitraStar Technology Corp.', 'F0EFD2': 'TF PAYMENT SERVICE CO., LTD', 'A80C63': 'HUAWEI TECHNOLOGIES CO.,LTD', '5CC307': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4939F': 'Hon Hai Precision Ind. Co., Ltd.', '000726': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'FC8B97': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '2CAB25': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '1CA532': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '001F92': 'Avigilon Corporation', '000C03': 'HDMI Licensing, LLC', '7CBACC': 'IEEE Registration Authority', '94F128': 'Hewlett Packard Enterprise', '101B54': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E0107F': 'Ruckus Wireless', 'C4017C': 'Ruckus Wireless', '04FA3F': 'Opticore Inc.', '0025C4': 'Ruckus Wireless', 'C0C520': 'Ruckus Wireless', '540237': 'Teltronic AG', '70DEF9': 'FAI WAH INTERNATIONAL (HONG KONG) LIMITED', '4CB008': 'Shenzhen Gwelltimes Technology Co.,Ltd', 'E86FF2': 'Actiontec Electronics, Inc', '005018': 'AMIT, Inc.', '3CF591': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '602101': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '7CEB7F': 'Dmet Products Corp.', '8C8580': 'Smart Innovation LLC', 'B0EABC': 'ASKEY COMPUTER CORP', '94C691': 'EliteGroup Computer Systems Co., LTD', 'FC2F6B': 'Everspin Technologies, Inc.', '287B09': 'zte corporation', '404229': 'Layer3TV, Inc', 'A88038': 'ShenZhen MovingComm Technology Co., Limited', '78B28D': 'Beijing Tengling Technology CO.Ltd ', 'F81D90': 'Solidwintech', 'A06A44': 'Vizio, Inc', 'DCBE7A': 'Zhejiang Nurotron Biotechnology Co.', '3438B7': 'HUMAX Co., Ltd.', 'CC0677': 'Fiberhome Telecommunication Technologies Co.,LTD', '784501': 'Biamp Systems', '14780B': 'Varex Imaging Deutschland AG', '88B111': 'Intel Corporate', '54D751': 'Proximus', '0080C2': 'IEEE 802.1 Working Group', '8C395C': 'Bit4id Srl', 'ECF342': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D4258B': 'Intel Corporate', '041B6D': 'LG Electronics (Mobile Communications)', 'F44156': 'Arrikto Inc.', '50FF20': 'Keenetic Limited', '309C23': 'Micro-Star INTL CO., LTD.', '145E45': 'Kaleao Limited', 'ACAFB9': 'Samsung Electronics Co.,Ltd', 'E45740': 'ARRIS Group, Inc.', '688DB6': 'AETEK INC.', 'B03D96': 'Vision Valley FZ LLC', 'F894C2': 'Intel Corporate', '986C5C': 'Jiangxi Gosun Guard Security Co.,Ltd', 'D8C8E9': 'Phicomm (Shanghai) Co., Ltd.', '7CB960': 'Shanghai X-Cheng telecom LTD', '00F82C': 'Cisco Systems, Inc', '00C1B1': 'Cisco Systems, Inc', 'F4FCB1': 'JJ Corp', '24792A': 'Ruckus Wireless', 'B42A0E': 'Technicolor CH USA Inc.', '9CC8AE': 'Becton, Dickinson and Company', 'B0359F': 'Intel Corporate', 'C0D962': 'ASKEY COMPUTER CORP', 'F80BCB': 'Cisco Systems, Inc', '50D37F': 'Yu 
Fly Mikly Way Science and Technology Co., Ltd.', '181212': 'Cepton Technologies', '70D923': 'vivo Mobile Communication Co., Ltd.', 'B83A08': 'Tenda Technology Co.,Ltd.Dongguan branch', '28B448': 'HUAWEI TECHNOLOGIES CO.,LTD', '100501': 'PEGATRON CORPORATION', '70D379': 'Cisco Systems, Inc', '70DB98': 'Cisco Systems, Inc', '30D386': 'zte corporation', 'A49BF5': 'Hybridserver Tec GmbH', '08028E': 'NETGEAR', 'B47C9C': 'Amazon Technologies Inc.', 'F4A739': 'Juniper Networks', 'F470AB': 'vivo Mobile Communication Co., Ltd.', '2C5A0F': 'Cisco Systems, Inc', '2C3124': 'Cisco Systems, Inc', '503237': 'Apple, Inc.', 'B0481A': 'Apple, Inc.', 'B49CDF': 'Apple, Inc.', '2CFAA2': 'Alcatel-Lucent Enterprise', 'E8E732': 'Alcatel-Lucent Enterprise', '48BF6B': 'Apple, Inc.', '503DA1': 'Samsung Electronics Co.,Ltd', 'F097E5': 'TAMIO, INC', '4C1A3D': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '58821D': 'H. Schomäcker GmbH', 'D8A105': 'Syslane, Co., Ltd.', 'BCA042': 'SHANGHAI FLYCO ELECTRICAL APPLIANCE CO.,LTD', '3C0518': 'Samsung Electronics Co.,Ltd', '900628': 'Samsung Electronics Co.,Ltd', '2C1DB8': 'ARRIS Group, Inc.', 'B4A9FE': 'GHIA Technology (Shenzhen) LTD', '08B258': 'Juniper Networks', '9C84BF': 'Apple, Inc.', '9CFCD1': 'Aetheris Technology (Shanghai) Co., Ltd.', 'AC6B0F': 'CADENCE DESIGN SYSTEMS INC', 'C8B5AD': 'Hewlett Packard Enterprise', '7C3866': 'Texas Instruments', '0C61CF': 'Texas Instruments', '9C1D58': 'Texas Instruments', 'CCCE1E': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '7C2664': 'Sagemcom Broadband SAS', '0CF4D5': 'Ruckus Wireless', '6C750D': 'WiFiSONG', '3805AC': 'Piller Group GmbH', '346E9D': 'Ericsson AB', '1CA0D3': 'IEEE Registration Authority', 'BC3F8F': 'HUAWEI TECHNOLOGIES CO.,LTD', '143004': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4DC41': 'YOUNGZONE CULTURE (SHANGHAI) CORP', '00111B': 'Targa Systems Div L-3 Communications', 'C4836F': 'Ciena Corporation', '7CC6C4': 'Kolff Computer Supplies b.v.', '000F4F': 'PCS Systemtechnik GmbH', '6854C1': 'ColorTokens, Inc.', '38AA3C': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '000302': 'Charles Industries, Ltd.', '142FFD': 'LT SECURITY INC', '50A4D0': 'IEEE Registration Authority', '800010': 'AT&T', '0024F1': 'Shenzhen Fanhai Sanjiang Electronics Co., Ltd.', '0C3CCD': 'Universal Global Scientific Industrial Co., Ltd.', '14ABC5': 'Intel Corporate', '50D213': 'CviLux Corporation', '001E29': 'Hypertherm Inc', 'A80CCA': 'Shenzhen Sundray Technologies Company Limited', '5004B8': 'HUAWEI TECHNOLOGIES CO.,LTD', '5CFF35': 'Wistron Corporation', 'CC9F7A': 'Chiun Mai Communication Systems, Inc', '78F29E': 'PEGATRON CORPORATION', '64777D': 'Hitron Technologies. 
Inc', '9C50EE': 'Cambridge Industries(Group) Co.,Ltd.', '40ED98': 'IEEE Registration Authority', 'C891F9': 'Sagemcom Broadband SAS', 'ACDCE5': 'Procter & Gamble Company', '00B362': 'Apple, Inc.', 'E4E4AB': 'Apple, Inc.', '60D262': 'Tzukuri Pty Ltd', '8404D2': 'Kirale Technologies SL', '00D0B2': 'Xiotech Corporation', 'F48C50': 'Intel Corporate', '64EB8C': 'Seiko Epson Corporation', 'DCD255': 'Kinpo Electronics, Inc.', '001351': 'Niles Audio Corporation', '54FA96': 'Nokia', '60334B': 'Apple, Inc.', 'A02C36': 'FN-LINK TECHNOLOGY LIMITED', '000320': 'Xpeed, Inc.', 'BCA8A6': 'Intel Corporate', '74FF4C': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '002590': 'Super Micro Computer, Inc.', '508A0F': 'SHENZHEN FISE TECHNOLOGY HOLDING CO.,LTD.', '7CCBE2': 'IEEE Registration Authority', 'A8A5E2': 'MSF-Vathauer Antriebstechnik GmbH & Co KG ', '38AFD7': 'FUJITSU LIMITED', '28993A': 'Arista Networks', '68AF13': 'Futura Mobility', '681AB2': 'zte corporation', 'E04FBD': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '7CEBAE': 'Ridgeline Instruments', 'E0508B': 'Zhejiang Dahua Technology Co., Ltd.', '9C1E95': 'Actiontec Electronics, Inc', '60427F': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', 'E89EB4': 'Hon Hai Precision Ind. Co.,Ltd.', 'D46A6A': 'Hon Hai Precision Ind. Co.,Ltd.', '98FD74': 'ACT.CO.LTD', 'AC1F6B': 'Super Micro Computer, Inc.', '000B2E': 'Cal-Comp Electronics & Communications Company Ltd.', '4865EE': 'IEEE Registration Authority', 'CCC5EF': 'Co-Comm Servicios Telecomunicaciones S.L.', '9002A9': 'Zhejiang Dahua Technology Co., Ltd.', 'C0288D': 'Logitech, Inc', '6CEC5A': 'Hon Hai Precision Ind. CO.,Ltd.', '44C346': 'HUAWEI TECHNOLOGIES CO.,LTD', '307496': 'HUAWEI TECHNOLOGIES CO.,LTD', '708A09': 'HUAWEI TECHNOLOGIES CO.,LTD', '000064': 'Yokogawa Digital Computer Corporation', 'D0F73B': 'Helmut Mauell GmbH Werk Weida', '180675': 'Dilax Intelcom GmbH', '000FC2': 'Uniwell Corporation', '0C4933': 'Sichuan Jiuzhou Electronic Technology Co., Ltd.', '64DB43': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '000E58': 'Sonos, Inc.', 'F4CAE5': 'FREEBOX SAS', '90004E': 'Hon Hai Precision Ind. Co.,Ltd.', '506B8D': 'Nutanix', '0038DF': 'Cisco Systems, Inc', '00FD45': 'Hewlett Packard Enterprise', 'C4BE84': 'Texas Instruments', 'F4F524': 'Motorola Mobility LLC, a Lenovo Company', '00BBC1': 'CANON INC.', '5098F3': 'Rheem Australia Pty Ltd', 'A81E84': 'QUANTA COMPUTER INC. ', '24C1BD': 'CRRC DALIAN R&D CO.,LTD.', '00A2EE': 'Cisco Systems, Inc', '0059DC': 'Cisco Systems, Inc', 'C8D3FF': 'Hewlett Packard', '0013A5': 'General Solutions, LTD.', '9C3DCF': 'NETGEAR', '7C2634': 'ARRIS Group, Inc.', '40F413': 'Rubezh', 'B04BBF': 'PT HAN SUNG ELECTORONICS INDONESIA', 'CC2D83': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '0015FF': 'Novatel Wireless Solutions, Inc.', '248894': 'shenzhen lensun Communication Technology LTD', 'F074E4': 'Thundercomm Technology Co., Ltd', 'A0722C': 'HUMAX Co., Ltd.', 'D46E0E': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'B4D135': 'Cloudistics', '88366C': 'EFM Networks', '48DA96': 'Eddy Smart Home Solutions Inc.', 'E00DB9': 'Cree, Inc.', 'FCD848': 'Apple, Inc.', 'DC0D30': 'Shenzhen Feasycom Technology Co., Ltd.', 'F0ACD7': 'IEEE Registration Authority', '9495A0': 'Google, Inc.', '00A6CA': 'Cisco Systems, Inc', 'C83DD4': 'CyberTAN Technology Inc.', 'E0B94D': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '38D547': 'ASUSTek COMPUTER INC.', 'D41D71': 'Palo Alto Networks', '008731': 'Cisco Systems, Inc', '88DEA9': 'Roku, Inc.', 'D8452B': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', 'E4186B': 'Zyxel Communications Corporation', 'EC107B': 'Samsung Electronics Co.,Ltd', '1C232C': 'Samsung Electronics Co.,Ltd', 'FC83C6': 'N-Radio Technologies Co., Ltd.', '78888A': 'CDR Sp. z o.o. Sp. k.', 'F02FA7': 'HUAWEI TECHNOLOGIES CO.,LTD', '18DED7': 'HUAWEI TECHNOLOGIES CO.,LTD', '30B64F': 'Juniper Networks', 'A42983': 'Boeing Defence Australia', 'EC8892': 'Motorola Mobility LLC, a Lenovo Company', '004A77': 'zte corporation', 'A41437': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '1C9D3E': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '60A10A': 'Samsung Electronics Co.,Ltd', '8C71F8': 'Samsung Electronics Co.,Ltd', 'CC051B': 'Samsung Electronics Co.,Ltd', '8C7712': 'Samsung Electronics Co.,Ltd', '9463D1': 'Samsung Electronics Co.,Ltd', '0021D2': 'Samsung Electronics Co.,Ltd', '0021D1': 'Samsung Electronics Co.,Ltd', '001FCC': 'Samsung Electronics Co.,Ltd', '5C497D': 'Samsung Electronics Co.,Ltd', '98234E': 'Micromedia AG', '503F98': 'CMITECH', '7487A9': 'OCT Technology Co., Ltd.', '782079': 'ID Tech', '0C6076': 'Hon Hai Precision Ind. Co.,Ltd.', '0CEEE6': 'Hon Hai Precision Ind. Co.,Ltd.', 'E4D53D': 'Hon Hai Precision Ind. Co.,Ltd.', 'C0143D': 'Hon Hai Precision Ind. Co.,Ltd.', 'C01885': 'Hon Hai Precision Ind. Co.,Ltd.', '5894CF': 'Vertex Standard LMR, Inc.', '20F85E': 'Delta Electronics', '0023E4': 'IPnect co. ltd.', '70D4F2': 'RIM', '00749C': 'Ruijie Networks Co.,LTD', 'BC20A4': 'Samsung Electronics Co.,Ltd', '08D42B': 'Samsung Electronics Co.,Ltd', '789ED0': 'Samsung Electronics Co.,Ltd', 'B0C4E7': 'Samsung Electronics Co.,Ltd', '0016DB': 'Samsung Electronics Co.,Ltd', '001EE2': 'Samsung Electronics Co.,Ltd', '002490': 'Samsung Electronics Co.,Ltd', '0023D7': 'Samsung Electronics Co.,Ltd', '549B12': 'Samsung Electronics Co.,Ltd', 'FCA13E': 'Samsung Electronics Co.,Ltd', 'A00798': 'Samsung Electronics Co.,Ltd', '7825AD': 'Samsung Electronics Co.,Ltd', 'ECE09B': 'Samsung Electronics Co.,Ltd', '001FCD': 'Samsung Electronics Co.,Ltd', '38ECE4': 'Samsung Electronics Co.,Ltd', '945103': 'Samsung Electronics Co.,Ltd', '001632': 'Samsung Electronics Co.,Ltd', 'E4E0C5': 'Samsung Electronics Co.,Ltd', '78D6F0': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '20D5BF': 'Samsung Electronics Co.,Ltd', '5CE8EB': 'Samsung Electronics Co.,Ltd', '1C25E1': 'China Mobile IOT Company Limited', 'C0F636': 'Hangzhou Kuaiyue Technologies, Ltd.', '7C11CB': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C81479': 'Samsung Electronics Co.,Ltd', '1CAF05': 'Samsung Electronics Co.,Ltd', '24C696': 'Samsung Electronics Co.,Ltd', 'B0DF3A': 'Samsung Electronics Co.,Ltd', '805719': 'Samsung Electronics Co.,Ltd', '34BE00': 'Samsung Electronics Co.,Ltd', 'C0BDD1': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'B479A7': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '94D771': 'Samsung Electronics Co.,Ltd', 'E84E84': 'Samsung Electronics Co.,Ltd', '240DC2': 'TCT mobile ltd', '78521A': 'Samsung Electronics Co.,Ltd', '04FEA1': 'Fihonest communication co.,Ltd', 'EC8CA2': 'Ruckus Wireless', '00808C': 'NetAlly', '049F81': 'NetAlly', 'A043DB': 'Sitael S.p.A.', 'E0E7BB': 'Nureva, Inc.', '7085C6': 'ARRIS Group, Inc.', '54E2E0': 'ARRIS Group, Inc.', '347A60': 'ARRIS Group, Inc.', '001CC3': 'ARRIS Group, Inc.', '001A22': 'eQ-3 Entwicklung GmbH', '20BBC6': 'Jabil Circuit Hungary Ltd.', '009058': 'Ultra Electronics Command & Control Systems', '001CFD': 'Universal Electronics, Inc.', 'B80018': 'Htel', '7472B0': 'Guangzhou Shiyuan Electronics Co., Ltd. 
', 'DC1A01': 'Ecoliv Technology ( Shenzhen ) Ltd.', 'EC8EAE': 'Nagravision SA', 'AC482D': 'Ralinwi Nanjing Electronic Technology Co., Ltd.', '546C0E': 'Texas Instruments', '00549F': 'Avaya Inc', '049FCA': 'HUAWEI TECHNOLOGIES CO.,LTD', '50016B': 'HUAWEI TECHNOLOGIES CO.,LTD', '080087': 'Xyplex, Inc.', '001087': 'XSTREAMIS PLC', '00B0B3': 'XSTREAMIS PLC', '4CB21C': 'Maxphotonics Co.,Ltd', '205EF7': 'Samsung Electronics Co.,Ltd', '141F78': 'Samsung Electronics Co.,Ltd', '002347': 'ProCurve Networking by HP', '0024A8': 'ProCurve Networking by HP', 'C09134': 'ProCurve Networking by HP', '001CEF': 'Primax Electronics Ltd.', '000276': 'Primax Electronics Ltd.', '00001B': 'Novell, Inc.', '001BBA': 'Nortel Networks', '001969': 'Nortel Networks', '0018B0': 'Nortel Networks', '0016CA': 'Nortel Networks', '0004DC': 'Nortel Networks', '000CF7': 'Nortel Networks', '000FCD': 'Nortel Networks', '002363': 'Zhuhai Raysharp Technology Co.,Ltd', '00140E': 'Nortel Networks', '001E1F': 'Nortel Networks', '2824FF': 'Wistron Neweb Corporation', '38256B': 'Microsoft Mobile Oy', '203AEF': 'Sivantos GmbH', '005979': 'Networked Energy Services', '207C8F': 'Quanta Microsystems,Inc.', '000B34': 'ShangHai Broadband Technologies CO.LTD', '3092F6': 'SHANGHAI SUNMON COMMUNICATION TECHNOGY CO.,LTD', 'A8AD3D': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '44DC91': 'PLANEX COMMUNICATIONS INC.', 'E09DB8': 'PLANEX COMMUNICATIONS INC.', '000F59': 'Phonak AG', '74B57E': 'zte corporation', 'D84710': 'Sichuan Changhong Electric Ltd.', '001972': 'Plexus (Xiamen) Co.,ltd.', '24AF4A': 'Alcatel-Lucent IPD', '7C2064': 'Alcatel-Lucent IPD', '48F8E1': 'Nokia', '8C90D3': 'Nokia', '001478': 'TP-LINK TECHNOLOGIES CO.,LTD.', '58BC8F': 'Cognitive Systems Corp.', 'D455BE': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '640DCE': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '54D272': 'Nuki Home Solutions GmbH', 'B8BB23': 'Guangdong Nufront CSC Co., Ltd', '34EA34': 'HangZhou Gubei Electronics Technology Co.,Ltd', 'EC26FB': 'TECC CO.,LTD.', '0020F4': 'SPECTRIX CORPORATION', 'C44BD1': 'Wallys Communications Teachnologies Co.,Ltd.', '6CB9C5': 'Delta Networks, Inc.', '0028F8': 'Intel Corporate', 'B47443': 'Samsung Electronics Co.,Ltd', 'FCF647': 'Fiberhome Telecommunication Technologies Co.,LTD', '18686A': 'zte corporation', 'DC4427': 'IEEE Registration Authority', '04EE91': 'x-fabric GmbH', 'C49A02': 'LG Electronics (Mobile Communications)', '30766F': 'LG Electronics (Mobile Communications)', 'A8922C': 'LG Electronics (Mobile Communications)', '001F6B': 'LG Electronics (Mobile Communications)', '0026E2': 'LG Electronics (Mobile Communications)', 'F80CF3': 'LG Electronics (Mobile Communications)', '9C6121': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '0003B2': 'Radware', '089E01': 'QUANTA COMPUTER INC.', 'E8886C': 'Shenzhen SC Technologies Co.,LTD', 'DC35F1': 'Positivo Informática SA.', '047D7B': 'QUANTA COMPUTER INC.', '00A0C6': 'Qualcomm Inc.', '649C81': 'Qualcomm Inc.', '0024FF': 'QLogic Corporation', '001E21': 'Qisda Corporation', '00039D': 'Qisda Corporation', 'BC3400': 'IEEE Registration Authority', '001A6A': 'Tranzas, Inc.', '3C0771': 'Sony Corporation', '80414E': 'BBK EDUCATIONAL ELECTRONICS CORP.,LTD.', '249442': 'OPEN ROAD SOLUTIONS , INC.', 'C46413': 'Cisco Systems, Inc', 'B808D7': 'HUAWEI TECHNOLOGIES CO.,LTD', '94611E': 'Wata Electronics Co.,Ltd. 
', 'A47174': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4CB52': 'HUAWEI TECHNOLOGIES CO.,LTD', '784476': 'Zioncom Electronics (Shenzhen) Ltd.', '001165': 'ZNYX Networks, Inc.', '00C0E4': 'SIEMENS BUILDING', '000D10': 'Embedtronics Oy', '001FA8': 'Smart Energy Instruments Inc.', '000FDB': 'Westell Technologies Inc.', '0010CA': 'Telco Systems, Inc. ', '00E09E': 'Quantum Corporation', '000A08': 'Alpine Electronics, Inc.', '000A68': 'Solarflare Communications Inc.', '002186': 'Universal Global Scientific Industrial Co., Ltd.', '183919': 'Unicoi Systems', '00080D': 'Toshiba', '000E7B': 'Toshiba', 'E8E0B7': 'Toshiba', 'ACE77B': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '2C36A0': 'Capisco Limited', 'B0B2DC': 'Zyxel Communications Corporation', 'CC5D4E': 'Zyxel Communications Corporation', '404A03': 'Zyxel Communications Corporation', 'C86C87': 'Zyxel Communications Corporation', 'A020A6': 'Espressif Inc.', '58528A': 'Mitsubishi Electric Corporation', '680715': 'Intel Corporate', '3CB6B7': 'vivo Mobile Communication Co., Ltd.', '38F0C8': 'Livestream', '001EC0': 'Microchip Technology Inc.', '645D92': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '38BC1A': 'MEIZU Technology Co., Ltd.', 'C4F1D1': 'BEIJING SOGOU TECHNOLOGY DEVELOPMENT CO., LTD.', 'E0885D': 'Technicolor CH USA Inc.', 'C42795': 'Technicolor CH USA Inc.', '206A8A': 'Wistron Infocomm (Zhongshan) Corporation', '802994': 'Technicolor CH USA Inc.', 'ECD68A': 'Shenzhen JMicron Intelligent Technology Developmen', '5052D2': 'Hangzhou Telin Technologies Co., Limited', '7C574E': 'COBI GmbH', '28F10E': 'Dell Inc.', '045604': 'Gionee Communication Equipment Co.,Ltd.', '90EED9': 'UNIVERSAL DE DESARROLLOS ELECTRÓNICOS, SA', '606453': 'AOD Co.,Ltd.', '6C98EB': 'Riverbed Technology, Inc.', 'C8AFE3': 'Hefei Radio Communication Technology Co., Ltd ', 'CCA260': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '00FEC8': 'Cisco Systems, Inc', '00253E': 'Sensus Metering Systems', '340AFF': 'Qingdao Hisense Communications Co.,Ltd.', '587E61': 'Qingdao Hisense Communications Co.,Ltd.', 'C0A1A2': 'MarqMetrix', '08D0B7': 'Qingdao Hisense Communications Co.,Ltd.', '34C0F9': 'Rockwell Automation', '2C5A8D': 'SYSTRONIK Elektronik u. 
Systemtechnik GmbH', '40F420': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '34A2A2': 'HUAWEI TECHNOLOGIES CO.,LTD', '10BEF5': 'D-Link International', 'A0415E': 'Opsens Solution Inc.', '001848': 'Vecima Networks Inc.', '0016FB': 'SHENZHEN MTC CO LTD', '74CC39': 'Fiberhome Telecommunication Technologies Co.,LTD', '009E1E': 'Cisco Systems, Inc', '0C8A87': 'AgLogica Holdings, Inc', '54EDA3': 'Navdy, Inc.', '945907': 'Shanghai HITE-BELDEN Network Technology Co., Ltd.', '749D8F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E47B3F': 'BEIJING CO-CLOUD TECHNOLOGY LTD.', '3822D6': 'Hangzhou H3C Technologies Co., Limited', 'C864C7': 'zte corporation', '0022E7': 'WPS Parking Systems', 'A860B6': 'Apple, Inc.', 'C4B301': 'Apple, Inc.', '4851B7': 'Intel Corporate', 'E05F45': 'Apple, Inc.', '483B38': 'Apple, Inc.', '1C9148': 'Apple, Inc.', '905F2E': 'TCT mobile ltd', 'B8E779': '9Solutions Oy', 'F823B2': 'HUAWEI TECHNOLOGIES CO.,LTD', '341290': 'Treeview Co.,Ltd.', '7CFE4E': 'Shenzhen Safe vision Technology Co.,LTD', '644FB0': 'Hyunjin.com', '28F366': 'Shenzhen Bilian electronic CO.,LTD', 'E0A3AC': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BC7574': 'HUAWEI TECHNOLOGIES CO.,LTD', '20A680': 'HUAWEI TECHNOLOGIES CO.,LTD', '8828B3': 'HUAWEI TECHNOLOGIES CO.,LTD', '00E0E6': 'INCAA Computers', '5C70A3': 'LG Electronics (Mobile Communications)', '94E8C5': 'ARRIS Group, Inc.', '6C3B6B': 'Routerboard.com', '006CBC': 'Cisco Systems, Inc', '001D08': 'Jiangsu Yinhe Electronics Co.,Ltd.', 'B4B15A': 'Siemens AG Energy Management Division', '00A0A4': 'Oracle Corporation ', '749781': 'zte corporation', '001D82': 'GN Netcom A/S', '001317': 'GN Netcom A/S', 'A4E597': 'Gessler GmbH', 'A86BAD': 'Hon Hai Precision Ind. Co.,Ltd.', 'D80F99': 'Hon Hai Precision Ind. Co.,Ltd.', '00142A': 'Elitegroup Computer Systems Co.,Ltd.', '00115B': 'Elitegroup Computer Systems Co.,Ltd.', 'C03FD5': 'Elitegroup Computer Systems Co.,Ltd.', 'ECA86B': 'Elitegroup Computer Systems Co.,Ltd.', 'C89CDC': 'Elitegroup Computer Systems Co.,Ltd.', '002511': 'Elitegroup Computer Systems Co.,Ltd.', '4487FC': 'Elitegroup Computer Systems Co.,Ltd.', '001188': 'Enterasys', '0001F4': 'Enterasys', '00109B': 'Emulex Corporation', '9CDF03': 'Harman/Becker Automotive Systems GmbH', '00BD82': 'Shenzhen YOUHUA Technology Co., Ltd', '4CB8B5': 'Shenzhen YOUHUA Technology Co., Ltd', '94513D': 'iSmart Alarm, Inc.', '001174': ' Mojo Networks, Inc.', '001954': 'Leaf Corporation.', '9466E7': 'WOM Engineering', 'BC15AC': 'Vodafone Italia S.p.A.', 'EC93ED': 'DDoS-Guard LTD', '0050FC': 'Edimax Technology Co. Ltd.', '0016FA': 'ECI Telecom Ltd.', '002465': 'Elentec', '00089F': 'EFM Networks', '7085C2': 'ASRock Incorporation', 'F8A188': 'LED Roadway Lighting', 'A082AC': 'Linear DMS Solutions Sdn. Bhd.', 'A86AC1': 'HanbitEDS Co., Ltd.', 'D463FE': 'Arcadyan Corporation', '689361': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '24BCF8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'DCEE06': 'HUAWEI TECHNOLOGIES CO.,LTD', '0452C7': 'Bose Corporation', '30FC68': 'TP-LINK TECHNOLOGIES CO.,LTD.', '008A96': 'Cisco Systems, Inc', 'BC60A7': 'Sony Interactive Entertainment Inc.', '808C97': 'Kaonmedia CO., LTD.', 'C84529': 'IMK Networks Co.,Ltd', 'E85659': 'Advanced-Connectek Inc.', 'F02745': 'F-Secure Corporation', '54D0B4': 'Xiamen Four-Faith Communication Technology Co.,Ltd', '00137C': 'Kaicom co., Ltd.', '34BF90': 'Fiberhome Telecommunication Technologies Co.,LTD', 'CCB3F8': 'FUJITSU ISOTEC LIMITED', 'E4A471': 'Intel Corporate', '10F005': 'Intel Corporate', '64CC2E': 'Xiaomi Communications Co Ltd', '8801F2': 'Vitec System Engineering Inc.', '14D11F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'DC094C': 'HUAWEI TECHNOLOGIES CO.,LTD', '1C6758': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A0043E': 'Parker Hannifin Manufacturing Germany GmbH & Co. KG', '7C477C': 'IEEE Registration Authority', 'F877B8': 'Samsung Electronics Co.,Ltd', 'F0D2F1': 'Amazon Technologies Inc.', 'A8E3EE': 'Sony Interactive Entertainment Inc.', '00248D': 'Sony Interactive Entertainment Inc.', '00041F': 'Sony Interactive Entertainment Inc.', '20A90E': 'TCT mobile ltd', 'EC438B': 'YAPTV', '980CA5': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '441102': 'EDMI Europe Ltd', 'A85EE4': '12Sided Technology, LLC', '182195': 'Samsung Electronics Co.,Ltd', '44783E': 'Samsung Electronics Co.,Ltd', '0CA2F4': 'Chameleon Technology (UK) Limited', '50DA00': 'Hangzhou H3C Technologies Co., Limited', 'F4ED5F': 'SHENZHEN KTC TECHNOLOGY GROUP ', '00E0E4': 'FANUC ROBOTICS NORTH AMERICA, Inc.', '000896': 'Printronix, Inc.', 'D0B53D': 'SEPRO ROBOTIQUE', '00D0EC': 'NAKAYO Inc', 'BC44B0': 'Elastifile', '74BFB7': 'Nusoft Corporation', '245EBE': 'QNAP Systems, Inc.', '0404EA': 'Valens Semiconductor Ltd.', '4CCC6A': 'Micro-Star INTL CO., LTD.', '800DD7': 'Latticework, Inc', '30636B': 'Apple, Inc.', '70884D': 'JAPAN RADIO CO., LTD.', 'A4F1E8': 'Apple, Inc.', '14C3C2': 'K.A. Schmersal GmbH & Co. 
KG', '10785B': 'Actiontec Electronics, Inc', '546751': 'Compal Broadband Networks, Inc.', '240B0A': 'Palo Alto Networks', 'D099D5': 'Alcatel-Lucent', 'DC0077': 'TP-LINK TECHNOLOGIES CO.,LTD.', '00247D': 'Nokia Danmark A/S', '002265': 'Nokia Danmark A/S', '0018C5': 'Nokia Danmark A/S', 'F4F5A5': 'Nokia Corporation', 'EC9B5B': 'Nokia Corporation', '14BB6E': 'Samsung Electronics Co.,Ltd', '1886AC': 'Nokia Danmark A/S', '001F5C': 'Nokia Danmark A/S', '001F00': 'Nokia Danmark A/S', '00164E': 'Nokia Danmark A/S', '002668': 'Nokia Danmark A/S', '002547': 'Nokia Danmark A/S', 'F88096': 'Elsys Equipamentos Eletrônicos Ltda', 'A811FC': 'ARRIS Group, Inc.', '001DAA': 'DrayTek Corp.', 'E498D1': 'Microsoft Mobile Oy', '6C2779': 'Microsoft Mobile Oy', '2CCC15': 'Nokia Corporation', 'D8FB5E': 'ASKEY COMPUTER CORP', '00CF1C': 'Communication Machinery Corporation', '002326': 'FUJITSU LIMITED', '0CBF15': 'Genetec Inc.', '000D4B': 'Roku, Inc.', '0040FB': 'CASCADE COMMUNICATIONS', 'D0542D': 'Cambridge Industries(Group) Co.,Ltd.', '744AA4': 'zte corporation', '001BA9': 'Brother industries, LTD.', '30A220': 'ARG Telecom', '28CC01': 'Samsung Electronics Co.,Ltd', '6CF373': 'Samsung Electronics Co.,Ltd', '9C3AAF': 'Samsung Electronics Co.,Ltd', '781FDB': 'Samsung Electronics Co.,Ltd', '183F47': 'Samsung Electronics Co.,Ltd', 'B46293': 'Samsung Electronics Co.,Ltd', '50A4C8': 'Samsung Electronics Co.,Ltd', '0CB319': 'Samsung Electronics Co.,Ltd', '1867B0': 'Samsung Electronics Co.,Ltd', '00001D': 'Cabletron Systems, Inc.', '4CA56D': 'Samsung Electronics Co.,Ltd', 'B86CE8': 'Samsung Electronics Co.,Ltd', '6C8336': 'Samsung Electronics Co.,Ltd', 'DCD87C': 'Beijing Jingdong Century Trading Co., LTD.', 'C4DA7D': 'Ivium Technologies B.V.', '000B6A': 'Asiarock Technology Limited', '009096': 'ASKEY COMPUTER CORP', '001B9E': 'ASKEY COMPUTER CORP', 'E0CA94': 'ASKEY COMPUTER CORP', '0026B6': 'ASKEY COMPUTER CORP', '6CB0CE': 'NETGEAR', '008EF2': 'NETGEAR', '9CD36D': 'NETGEAR', 'C40415': 'NETGEAR', 'E8FCAF': 'NETGEAR', '002557': 'BlackBerry RTS', '001CCC': 'BlackBerry RTS', '00300A': 'Aztech Electronics Pte Ltd', '001F3F': 'AVM GmbH', '246511': 'AVM GmbH', 'C0FFD4': 'NETGEAR', '00264D': 'Arcadyan Technology Corporation', '849CA6': 'Arcadyan Technology Corporation', '841B5E': 'NETGEAR', '2CB05D': 'NETGEAR', 'A021B7': 'NETGEAR', '0024B2': 'NETGEAR', '001B2F': 'NETGEAR', '001F33': 'NETGEAR', 'E03E44': 'Broadcom', '001225': 'ARRIS Group, Inc.', '002040': 'ARRIS Group, Inc.', '386BBB': 'ARRIS Group, Inc.', 'E86D52': 'ARRIS Group, Inc.', '3C754A': 'ARRIS Group, Inc.', 'E48399': 'ARRIS Group, Inc.', '002143': 'ARRIS Group, Inc.', '74F612': 'ARRIS Group, Inc.', '002495': 'ARRIS Group, Inc.', '0024A0': 'ARRIS Group, Inc.', '145BD1': 'ARRIS Group, Inc.', '6CC1D2': 'ARRIS Group, Inc.', '1C1448': 'ARRIS Group, Inc.', '001784': 'ARRIS Group, Inc.', '00080E': 'ARRIS Group, Inc.', '00909C': 'ARRIS Group, Inc.', '001C11': 'ARRIS Group, Inc.', '001E46': 'ARRIS Group, Inc.', '0018A4': 'ARRIS Group, Inc.', '0018C0': 'ARRIS Group, Inc.', '002374': 'ARRIS Group, Inc.', 'ACE010': 'Liteon Technology Corporation', '747548': 'Amazon Technologies Inc.', '0000B1': 'Alpha Micro', '001802': 'Alpha Networks Inc.', '001E45': 'Sony Mobile Communications Inc', '001CA4': 'Sony Mobile Communications Inc', '001A75': 'Sony Mobile Communications Inc', '78843C': 'Sony Corporation', '0013A9': 'Sony Corporation', '000AD9': 'Sony Mobile Communications Inc', '000E07': 'Sony Mobile Communications Inc', '94CE2C': 'Sony Mobile Communications Inc', 'FC0FE6': 'Sony 
Interactive Entertainment Inc.', '74DE2B': 'Liteon Technology Corporation', '00225F': 'Liteon Technology Corporation', '5C93A2': 'Liteon Technology Corporation', '24FD52': 'Liteon Technology Corporation', '2CD05A': 'Liteon Technology Corporation', '74E543': 'Liteon Technology Corporation', '0015CF': 'ARRIS Group, Inc.', '6CFAA7': 'AMPAK Technology, Inc.', '0023F1': 'Sony Mobile Communications Inc', '54E4BD': 'FN-LINK TECHNOLOGY LIMITED', '5414FD': 'Orbbec 3D Technology International', '900BC1': 'Sprocomm Technologies CO.,Ltd', '001CA8': 'AirTies Wireless Networks', '485D60': 'AzureWave Technology Inc.', 'DC85DE': 'AzureWave Technology Inc.', '00238E': 'ADB Broadband Italia', '001D8B': 'ADB Broadband Italia', '0013C8': 'ADB Broadband Italia', 'DC0B1A': 'ADB Broadband Italia', 'B0EE45': 'AzureWave Technology Inc.', '54271E': 'AzureWave Technology Inc.', '0C6AE6': 'Stanley Security Solutions', '842615': 'ADB Broadband Italia', 'F0842F': 'ADB Broadband Italia', '28C2DD': 'AzureWave Technology Inc.', '80A589': 'AzureWave Technology Inc.', 'C40938': 'FUJIAN STAR-NET COMMUNICATION CO.,LTD', '001C50': 'TCL Technoly Electronics (Huizhou) Co., Ltd.', '00AA02': 'Intel Corporation', 'ACE5F0': 'Doppler Labs', 'F48E38': 'Dell Inc.', '74C63B': 'AzureWave Technology Inc.', '984FEE': 'Intel Corporate', 'E82AEA': 'Intel Corporate', '605718': 'Intel Corporate', 'C4D987': 'Intel Corporate', '7C7A91': 'Intel Corporate', 'AC7BA1': 'Intel Corporate', '6C2995': 'Intel Corporate', 'FCF8AE': 'Intel Corporate', '6036DD': 'Intel Corporate', '100BA9': 'Intel Corporate', '8C705A': 'Intel Corporate', '606720': 'Intel Corporate', '7C5CF8': 'Intel Corporate', 'B4E1C4': 'Microsoft Mobile Oy', 'E0757D': 'Motorola Mobility LLC, a Lenovo Company', '34BB26': 'Motorola Mobility LLC, a Lenovo Company', '806C1B': 'Motorola Mobility LLC, a Lenovo Company', '0016EB': 'Intel Corporate', '0018DE': 'Intel Corporate', '5CE0C5': 'Intel Corporate', '58A839': 'Intel Corporate', '001E67': 'Intel Corporate', '0022FA': 'Intel Corporate', '001500': 'Intel Corporate', 'A088B4': 'Intel Corporate', '648099': 'Intel Corporate', 'D07E35': 'Intel Corporate', '001E65': 'Intel Corporate', '348446': 'Ericsson AB', '044E06': 'Ericsson AB', '00270E': 'Intel Corporate', '0026B9': 'Dell Inc.', '00D09E': '2Wire Inc', '0019E4': '2Wire Inc', '001AC4': '2Wire Inc', '001B5B': '2Wire Inc', '001EC7': '2Wire Inc', 'DC7FA4': '2Wire Inc', 'B0D5CC': 'Texas Instruments', '3829DD': 'ONvocal Inc', '002650': '2Wire Inc', '002351': '2Wire Inc', '3CEA4F': '2Wire Inc', 'C81F66': 'Dell Inc.', '0015C5': 'Dell Inc.', '001422': 'Dell Inc.', '001E4F': 'Dell Inc.', '5C260A': 'Dell Inc.', '7845C4': 'Dell Inc.', '109836': 'Dell Inc.', '64006A': 'Dell Inc.', '800A80': 'IEEE Registration Authority', 'F8DB88': 'Dell Inc.', '3CA348': 'vivo Mobile Communication Co., Ltd.', 'E45AA2': 'vivo Mobile Communication Co., Ltd.', 'CC3B3E': 'Lester Electrical', '2082C0': 'Xiaomi Communications Co Ltd', 'DC6DCD': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'C4282D': 'Embedded Intellect Pty Ltd', '5846E1': 'Baxter International Inc', '00173F': 'Belkin International Inc.', '001CDF': 'Belkin International Inc.', 'C05627': 'Belkin International Inc.', '4C17EB': 'Sagemcom Broadband SAS', 'CC33BB': 'Sagemcom Broadband SAS', 'D86CE9': 'Sagemcom Broadband SAS', 'E8F1B0': 'Sagemcom Broadband SAS', '5C6B32': 'Texas Instruments', '84DD20': 'Texas Instruments', '001831': 'Texas Instruments', '24FD5B': 'SmartThings, Inc.', '2876CD': 'Funshion Online Technologies Co.,Ltd', '205532': 'Gotech 
International Technology Limited', '2CFF65': 'Oki Electric Industry Co., Ltd.', '2C27D7': 'Hewlett Packard', '984BE1': 'Hewlett Packard', '002926': 'Applied Optoelectronics, Inc Taiwan Branch', '24BA13': 'RISO KAGAKU CORPORATION', '001B11': 'D-Link Corporation', '00265A': 'D-Link Corporation', 'C8BE19': 'D-Link International', '0017E5': 'Texas Instruments', '0017EC': 'Texas Instruments', '0017E7': 'Texas Instruments', '0017E9': 'Texas Instruments', '1CBA8C': 'Texas Instruments', '0015E9': 'D-Link Corporation', 'B436A9': 'Fibocom Wireless Inc. ', '70CA4D': 'Shenzhen lnovance Technology Co.,Ltd.', '006037': 'NXP Semiconductors', '001A11': 'Google, Inc.', '28BC56': 'EMAC, Inc.', '00CDFE': 'Apple, Inc.', 'A0F895': 'Shenzhen TINNO Mobile Technology Corp.', '0078CD': 'Ignition Design Labs', 'DCC0EB': 'ASSA ABLOY CÔTE PICARDE', '0050C2': 'IEEE Registration Authority', '440010': 'Apple, Inc.', '0056CD': 'Apple, Inc.', '48DB50': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A4BA76': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C8665D': 'Aerohive Networks Inc.', 'C8478C': 'Beken Corporation', '9CEFD5': 'Panda Wireless, Inc.', '9C3426': 'ARRIS Group, Inc.', '2C6E85': 'Intel Corporate', '001DD1': 'ARRIS Group, Inc.', '001DCF': 'ARRIS Group, Inc.', '001DD5': 'ARRIS Group, Inc.', '001DD4': 'ARRIS Group, Inc.', 'E498D6': 'Apple, Inc.', '002283': 'Juniper Networks', 'E01C41': 'Aerohive Networks Inc.', 'D854A2': 'Aerohive Networks Inc.', 'CCA462': 'ARRIS Group, Inc.', '14CFE2': 'ARRIS Group, Inc.', '0010DB': 'Juniper Networks', '00121E': 'Juniper Networks', 'E4FAFD': 'Intel Corporate', '94659C': 'Intel Corporate', '484520': 'Intel Corporate', 'C80E77': 'Le Shi Zhi Xin Electronic Technology (Tianjin) Limited', '9049FA': 'Intel Corporate', 'BC0F64': 'Intel Corporate', '6455B1': 'ARRIS Group, Inc.', '0002B3': 'Intel Corporation', '000347': 'Intel Corporation', '000E0C': 'Intel Corporation', '001320': 'Intel Corporate', '001871': 'Hewlett Packard', '000E7F': 'Hewlett Packard', '001185': 'Hewlett Packard', '001279': 'Hewlett Packard', '001321': 'Hewlett Packard', '106F3F': 'BUFFALO.INC', 'B0C745': 'BUFFALO.INC', '18A905': 'Hewlett Packard', '001B78': 'Hewlett Packard', '44E137': 'ARRIS Group, Inc.', '0000C5': 'ARRIS Group, Inc.', 'D8D385': 'Hewlett Packard', '002556': 'Hon Hai Precision Ind. Co.,Ltd.', '601888': 'zte corporation', 'D860B0': 'bioMérieux Italia S.p.A.', 'D8FC38': 'Giantec Semiconductor Inc', 'AC2A0C': 'CSR ZHUZHOU INSTITUTE CO.,LTD.', 'F8DB7F': 'HTC Corporation', '64A769': 'HTC Corporation', 'E899C4': 'HTC Corporation', 'BCCFCC': 'HTC Corporation', '28565A': 'Hon Hai Precision Ind. Co.,Ltd.', '0080E1': 'STMicroelectronics SRL', '000802': 'Hewlett Packard', '0002A5': 'Hewlett Packard', '6CC217': 'Hewlett Packard', 'EC9A74': 'Hewlett Packard', '10604B': 'Hewlett Packard', 'C8CBB8': 'Hewlett Packard', '843497': 'Hewlett Packard', '1458D0': 'Hewlett Packard', '5C8A38': 'Hewlett Packard', '2C59E5': 'Hewlett Packard', '00234D': 'Hon Hai Precision Ind. Co.,Ltd.', '0004EA': 'Hewlett Packard', '2C6798': 'InTalTech Ltd.', '6CB56B': 'HUMAX Co., Ltd.', '74EA3A': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F81A67': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'EC172F': 'TP-LINK TECHNOLOGIES CO.,LTD.', '14E6E4': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'E422A5': 'PLANTRONICS, INC.', 'D4C9B2': 'Quanergy Systems Inc', '6021C0': 'Murata Manufacturing Co., Ltd.', '002268': 'Hon Hai Precision Ind. 
Co.,Ltd.', 'BC3AEA': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '0019E0': 'TP-LINK TECHNOLOGIES CO.,LTD.', '002586': 'TP-LINK TECHNOLOGIES CO.,LTD.', '54E6FC': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'D81FCC': 'Brocade Communications Systems, Inc.', '847778': 'Cochlear Limited', '88308A': 'Murata Manufacturing Co., Ltd.', '5CDAD4': 'Murata Manufacturing Co., Ltd.', '0026E8': 'Murata Manufacturing Co., Ltd.', 'F431C3': 'Apple, Inc.', '64A5C3': 'Apple, Inc.', '887F03': 'Comper Technology Investment Limited', '344B50': 'zte corporation', 'FCC897': 'zte corporation', '002512': 'zte corporation', '300ED5': 'Hon Hai Precision Ind. Co.,Ltd.', '485AB6': 'Hon Hai Precision Ind. Co.,Ltd.', '543530': 'Hon Hai Precision Ind. Co.,Ltd.', 'E8088B': 'HUAWEI TECHNOLOGIES CO.,LTD', '0C96BF': 'HUAWEI TECHNOLOGIES CO.,LTD', '60E701': 'HUAWEI TECHNOLOGIES CO.,LTD', '50EB1A': 'Brocade Communications Systems, Inc.', '0027F8': 'Brocade Communications Systems, Inc.', '748EF8': 'Brocade Communications Systems, Inc.', '001C26': 'Hon Hai Precision Ind. Co.,Ltd.', '0016CE': 'Hon Hai Precision Ind. Co.,Ltd.', 'BC25E0': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4E3FB': 'HUAWEI TECHNOLOGIES CO.,LTD', '002438': 'Brocade Communications Systems, Inc.', '0014C9': 'Brocade Communications Systems, Inc.', 'F866D1': 'Hon Hai Precision Ind. Co.,Ltd.', '90671C': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F0EBD0': 'Shanghai Feixun Communication Co.,Ltd.', '888603': 'HUAWEI TECHNOLOGIES CO.,LTD', '04F938': 'HUAWEI TECHNOLOGIES CO.,LTD', '0C37DC': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BC7670': 'HUAWEI TECHNOLOGIES CO.,LTD', '24DBAC': 'HUAWEI TECHNOLOGIES CO.,LTD', '1C1D67': 'HUAWEI TECHNOLOGIES CO.,LTD', '84A8E4': 'HUAWEI TECHNOLOGIES CO.,LTD', 'EC233D': 'HUAWEI TECHNOLOGIES CO.,LTD', '78F5FD': 'HUAWEI TECHNOLOGIES CO.,LTD', '5C7D5E': 'HUAWEI TECHNOLOGIES CO.,LTD', '20F3A3': 'HUAWEI TECHNOLOGIES CO.,LTD', 'AC853D': 'HUAWEI TECHNOLOGIES CO.,LTD', '4846FB': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E0247F': 'HUAWEI TECHNOLOGIES CO.,LTD', '00464B': 'HUAWEI TECHNOLOGIES CO.,LTD', '001882': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D02DB3': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E8CD2D': 'HUAWEI TECHNOLOGIES CO.,LTD', '2469A5': 'HUAWEI TECHNOLOGIES CO.,LTD', '64C354': 'Avaya Inc', '50CD22': 'Avaya Inc', 'B4A95A': 'Avaya Inc', '581626': 'Avaya Inc', 'A051C6': 'Avaya Inc', 'D4EA0E': 'Avaya Inc', '6CFA58': 'Avaya Inc', '6CA849': 'Avaya Inc', 'CC53B5': 'HUAWEI TECHNOLOGIES CO.,LTD', '60DE44': 'HUAWEI TECHNOLOGIES CO.,LTD', '105172': 'HUAWEI TECHNOLOGIES CO.,LTD', '08E84F': 'HUAWEI TECHNOLOGIES CO.,LTD', '643E8C': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A086C6': 'Xiaomi Communications Co Ltd', '9C99A0': 'Xiaomi Communications Co Ltd', '584498': 'Xiaomi Communications Co Ltd', '0012D2': 'Texas Instruments', 'A863F2': 'Texas Instruments', 'D0FF50': 'Texas Instruments', '20C38F': 'Texas Instruments', '70E422': 'Cisco Systems, Inc', '00500F': 'Cisco Systems, Inc', '0050A2': 'Cisco Systems, Inc', '7C669D': 'Texas Instruments', 'D8DDFD': 'Texas Instruments', 'D05FB8': 'Texas Instruments', '84EB18': 'Texas Instruments', '80FB06': 'HUAWEI TECHNOLOGIES CO.,LTD', '7C6097': 'HUAWEI TECHNOLOGIES CO.,LTD', '7C1DD9': 'Xiaomi Communications Co Ltd', '00023D': 'Cisco Systems, Inc', '00502A': 'Cisco Systems, Inc', 'EC1127': 'Texas Instruments', 'E8EDF3': 'Cisco Systems, Inc', 'E4C722': 'Cisco Systems, Inc', '64E950': 'Cisco Systems, Inc', 'C07BBC': 'Cisco Systems, Inc', '24E9B3': 'Cisco Systems, Inc', '00E0F9': 'Cisco Systems, Inc', 'C8D719': 'Cisco-Linksys, LLC', '203A07': 'Cisco Systems, Inc', 'B0FAEB': 'Cisco 
Systems, Inc', '7CAD74': 'Cisco Systems, Inc', 'F41FC2': 'Cisco Systems, Inc', '44ADD9': 'Cisco Systems, Inc', '000E08': 'Cisco-Linksys, LLC', '00603E': 'Cisco Systems, Inc', '00602F': 'Cisco Systems, Inc', '006047': 'Cisco Systems, Inc', '00E0B0': 'Cisco Systems, Inc', '00E0FE': 'Cisco Systems, Inc', '00E034': 'Cisco Systems, Inc', '0C6803': 'Cisco Systems, Inc', 'C08C60': 'Cisco Systems, Inc', '0011D8': 'ASUSTek COMPUTER INC.', '0018F3': 'ASUSTek COMPUTER INC.', '001A92': 'ASUSTek COMPUTER INC.', '001079': 'Cisco Systems, Inc', '001029': 'Cisco Systems, Inc', '4403A7': 'Cisco Systems, Inc', 'F07816': 'Cisco Systems, Inc', '00223A': 'Cisco SPVTG', '0021BE': 'Cisco SPVTG', '000C41': 'Cisco-Linksys, LLC', '0016B6': 'Cisco-Linksys, LLC', '0018F8': 'Cisco-Linksys, LLC', '00252E': 'Cisco SPVTG', '54D46F': 'Cisco SPVTG', 'A4A24A': 'Cisco SPVTG', '44E08E': 'Cisco SPVTG', 'BCC810': 'Cisco SPVTG', '7CB21B': 'Cisco SPVTG', '24767D': 'Cisco SPVTG', '481D70': 'Cisco SPVTG', '00E036': 'PIONEER CORPORATION', 'F07F06': 'Cisco Systems, Inc', 'BC16F5': 'Cisco Systems, Inc', 'FC5B39': 'Cisco Systems, Inc', '346F90': 'Cisco Systems, Inc', '5CFC66': 'Cisco Systems, Inc', 'D46D50': 'Cisco Systems, Inc', '74A02F': 'Cisco Systems, Inc', 'F4CFE2': 'Cisco Systems, Inc', 'A80C0D': 'Cisco Systems, Inc', '88F031': 'Cisco Systems, Inc', '1CDEA7': 'Cisco Systems, Inc', '88908D': 'Cisco Systems, Inc', '14DAE9': 'ASUSTek COMPUTER INC.', '00E04F': 'Cisco Systems, Inc', '0010FF': 'Cisco Systems, Inc', '001054': 'Cisco Systems, Inc', '0010F6': 'Cisco Systems, Inc', '0010A6': 'Cisco Systems, Inc', '001EC2': 'Apple, Inc.', '001FF3': 'Apple, Inc.', '002332': 'Apple, Inc.', '00236C': 'Apple, Inc.', '0023DF': 'Apple, Inc.', '002500': 'Apple, Inc.', 'F0B2E5': 'Cisco Systems, Inc', '5897BD': 'Cisco Systems, Inc', '5C838F': 'Cisco Systems, Inc', 'ECBD1D': 'Cisco Systems, Inc', '0010FA': 'Apple, Inc.', '0050E4': 'Apple, Inc.', '000D93': 'Apple, Inc.', '0019E3': 'Apple, Inc.', '001B63': 'Apple, Inc.', '58B035': 'Apple, Inc.', '5C5948': 'Apple, Inc.', 'C8BCC8': 'Apple, Inc.', '28E7CF': 'Apple, Inc.', 'E4CE8F': 'Apple, Inc.', 'E8040B': 'Apple, Inc.', '24AB81': 'Apple, Inc.', 'E0F847': 'Apple, Inc.', '0025BC': 'Apple, Inc.', '34159E': 'Apple, Inc.', '7CFADF': 'Apple, Inc.', '1CABA7': 'Apple, Inc.', '8CFABA': 'Apple, Inc.', '7CD1C3': 'Apple, Inc.', 'F0DCE2': 'Apple, Inc.', 'A82066': 'Apple, Inc.', '28E02C': 'Apple, Inc.', 'E0B9BA': 'Apple, Inc.', '00C610': 'Apple, Inc.', '78A3E4': 'Apple, Inc.', '145A05': 'Apple, Inc.', '148FC6': 'Apple, Inc.', '286AB8': 'Apple, Inc.', 'C0847A': 'Apple, Inc.', 'B8F6B1': 'Apple, Inc.', 'BC52B7': 'Apple, Inc.', '98B8E3': 'Apple, Inc.', '786C1C': 'Apple, Inc.', '4C8D79': 'Apple, Inc.', '1CE62B': 'Apple, Inc.', '881FA1': 'Apple, Inc.', '885395': 'Apple, Inc.', '24A2E1': 'Apple, Inc.', '04214C': 'Insight Energy Ventures LLC', 'F832E4': 'ASUSTek COMPUTER INC.', '80EA96': 'Apple, Inc.', '600308': 'Apple, Inc.', '04F13E': 'Apple, Inc.', '98F0AB': 'Apple, Inc.', '0C3021': 'Apple, Inc.', 'DC86D8': 'Apple, Inc.', '90B931': 'Apple, Inc.', 'D0E140': 'Apple, Inc.', '24A074': 'Apple, Inc.', 'F02475': 'Apple, Inc.', '2C1F23': 'Apple, Inc.', '549F13': 'Apple, Inc.', 'F0DBE2': 'Apple, Inc.', '748114': 'Apple, Inc.', '18F643': 'Apple, Inc.', '5C8D4E': 'Apple, Inc.', '8863DF': 'Apple, Inc.', '0C3E9F': 'Apple, Inc.', '783A84': 'Apple, Inc.', '84788B': 'Apple, Inc.', '80BE05': 'Apple, Inc.', 'C8E0EB': 'Apple, Inc.', '7831C1': 'Apple, Inc.', '9C293F': 'Apple, Inc.', '80A1AB': 'Intellisis', '84285A': 'Saffron Solutions Inc', 
'D4B8FF': 'Home Control Singapore Pte Ltd', '98E0D9': 'Apple, Inc.', 'CC29F5': 'Apple, Inc.', '285AEB': 'Apple, Inc.', 'FCFC48': 'Apple, Inc.', 'A8667F': 'Apple, Inc.', 'D02598': 'Apple, Inc.', '087402': 'Apple, Inc.', '94F6A3': 'Apple, Inc.', 'A45E60': 'Apple, Inc.', 'A01828': 'Apple, Inc.', 'D0034B': 'Apple, Inc.', '10417F': 'Apple, Inc.', 'EC64E7': 'MOCACARE Corporation', '40862E': 'JDM MOBILE INTERNET SOLUTION CO., LTD.', '58F496': 'Source Chain', '3C7873': 'Airsonics', '9C88AD': 'Fiberhome Telecommunication Technologies Co.,LTD', 'C8A2CE': 'Oasis Media Systems LLC', '84A423': 'Sagemcom Broadband SAS', '98F428': 'zte corporation', 'E8343E': 'Beijing Infosec Technologies Co., LTD.', '346987': 'zte corporation', '587F57': 'Apple, Inc.', '988744': 'Wuxi Hongda Science and Technology Co.,LTD', 'D07C2D': 'Leie IOT technology Co., Ltd', '88947E': 'Fiberhome Telecommunication Technologies Co.,LTD', '38F557': 'JOLATA, INC.', '4054E4': 'Wearsafe Labs Inc', 'A4CC32': 'Inficomm Co., Ltd', 'C4BBEA': 'Pakedge Device and Software Inc', 'DC9A8E': 'Nanjing Cocomm electronics co., LTD', '3C831E': 'CKD Corporation', '90DFFB': 'HOMERIDER SYSTEMS', '2C081C': 'OVH', 'C08488': 'Finis Inc', '305A3A': 'ASUSTek COMPUTER INC.', '006D52': 'Apple, Inc.', '70BF3E': 'Charles River Laboratories', 'A8C87F': 'Roqos, Inc.', 'A03299': 'Lenovo (Beijing) Co., Ltd.', 'ACEE9E': 'Samsung Electronics Co.,Ltd', '5CCF7F': 'Espressif Inc.', '385F66': 'Cisco SPVTG', 'B844D9': 'Apple, Inc.', '7011AE': 'Music Life LTD', '9C7A03': 'Ciena Corporation', '246C8A': 'YUKAI Engineering', 'ACC51B': 'Zhuhai Pantum Electronics Co., Ltd.', '681295': 'Lupine Lighting Systems GmbH', 'B857D8': 'Samsung Electronics Co.,Ltd', '54A3FA': 'BQT Solutions (Australia)Pty Ltd', '041E7A': 'DSPWorks', 'C8A9FC': 'Goyoo Networks Inc.', '2CCF58': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D09380': 'Ducere Technologies Pvt. Ltd.', '68F956': 'Objetivos y Servicio de Valor Añadido', '84A788': 'Perples', 'AC60B6': 'Ericsson AB', '14B370': 'Gigaset Digital Technology (Shenzhen) Co., Ltd.', '6889C1': 'HUAWEI TECHNOLOGIES CO.,LTD', '444CA8': 'Arista Networks', '7C2BE1': 'Shenzhen Ferex Electrical Co.,Ltd', '5031AD': 'ABB Global Industries and Services Private Limited', '1C497B': 'Gemtek Technology Co., Ltd.', 'A4C138': 'Telink Semiconductor (Taipei) Co. 
Ltd.', '143EBF': 'zte corporation', 'FC2FEF': 'UTT Technologies Co., Ltd.', '20F510': 'Codex Digital Limited', 'F09A51': 'Shanghai Viroyal Electronic Technology Company Limited', '7CA23E': 'HUAWEI TECHNOLOGIES CO.,LTD', '689AB7': 'Atelier Vision Corporation', 'A8741D': 'PHOENIX CONTACT Electronics GmbH', 'FCFEC2': 'Invensys Controls UK Limited', '1C56FE': 'Motorola Mobility LLC, a Lenovo Company', 'A48D3B': 'Vizio, Inc', '4CB82C': 'Cambridge Mobile Telematics, Inc.', 'E4A32F': 'Shanghai Artimen Technology Co., Ltd.', 'BCEB5F': 'Fujian Beifeng Telecom Technology Co., Ltd.', 'B899B0': 'Cohere Technologies', 'A8D828': 'Ascensia Diabetes Care', 'B869C2': 'Sunitec Enterprise Co., Ltd.', '88CBA5': 'Suzhou Torchstar Intelligent Technology Co.,Ltd', '046169': 'MEDIA GLOBAL LINKS CO., LTD.', 'F4672D': 'ShenZhen Topstar Technology Company', 'E00370': 'ShenZhen Continental Wireless Technology Co., Ltd.', 'D85DEF': 'Busch-Jaeger Elektro GmbH', '501AA5': 'GN Netcom A/S', '3C3178': 'Qolsys Inc.', '809FAB': 'Fiberhome Telecommunication Technologies Co.,LTD', '88A2D7': 'HUAWEI TECHNOLOGIES CO.,LTD', 'AC562C': 'LAVA INTERNATIONAL(H.K) LIMITED', 'FC9AFA': 'Motus Global Inc.', 'F0AB54': 'MITSUMI ELECTRIC CO.,LTD.', '14157C': 'TOKYO COSMOS ELECTRIC CO.,LTD.', '20E407': 'Spark srl', '887384': 'Toshiba', 'D09DAB': 'TCT mobile ltd', '08ECA9': 'Samsung Electronics Co.,Ltd', 'AC5A14': 'Samsung Electronics Co.,Ltd', '00323A': 'so-logic', 'F46A92': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', 'F0D657': 'ECHOSENS', '9C37F4': 'HUAWEI TECHNOLOGIES CO.,LTD', '3C4711': 'HUAWEI TECHNOLOGIES CO.,LTD', '5CEB68': 'Cheerstar Technology Co., Ltd', 'E04B45': 'Hi-P Electronics Pte Ltd', '486EFB': 'Davit System Technology Co., Ltd.', 'C0DC6A': 'Qingdao Eastsoft Communication Technology Co.,LTD', '24693E': 'innodisk Corporation', '1CF03E': 'Wearhaus Inc.', '24B0A9': 'Shanghai Mobiletek Communication Ltd.', '407FE0': 'Glory Star Technics (ShenZhen) Limited', 'C8E130': 'Milkyway Group Ltd', '1CC586': 'Absolute Acoustics', '5C5B35': 'Mist Systems, Inc.', '805067': 'W & D TECHNOLOGY CORPORATION', '78F944': 'Private', 'B0966C': 'Lanbowan Technology Ltd.', '247656': 'Shanghai Net Miles Fiber Optics Technology Co., LTD.', 'F8CFC5': 'Motorola Mobility LLC, a Lenovo Company', 'E4F939': 'Minxon Hotel Technology INC.', '146B72': 'Shenzhen Fortune Ship Technology Co., Ltd.', 'C43ABE': 'Sony Mobile Communications Inc', '883B8B': 'Cheering Connection Co. Ltd.', 'B8F080': 'SPS, INC.', 'A47B85': 'ULTIMEDIA Co Ltd,', 'ECBAFE': 'GIROPTIC', '3C2C94': '杭州德澜科技有限公司(HangZhou Delan Technology Co.,Ltd)', '241B44': 'Hangzhou Tuners Electronics Co., Ltd', '7840E4': 'Samsung Electronics Co.,Ltd', 'E09971': 'Samsung Electronics Co.,Ltd', '70DA9C': 'TECSEN', '2CA2B4': 'Fortify Technologies, LLC', '10D38A': 'Samsung Electronics Co.,Ltd', '80A85D': 'Osterhout Design Group', 'ACCAAB': 'Virtual Electric Inc', '485415': 'NET RULES TECNOLOGIA EIRELI', '847303': 'Letv Mobile and Intelligent Information Technology (Beijing) Corporation Ltd.', '206274': 'Microsoft Corporation', 'E8162B': 'IDEO Security Co., Ltd.', '8C873B': 'Leica Camera AG', 'B47356': 'Hangzhou Treebear Networking Co., Ltd.', '346895': 'Hon Hai Precision Ind. Co.,Ltd.', '3CC2E1': 'XINHUA CONTROL ENGINEERING CO.,LTD', 'E48501': 'Geberit International AG', '44F477': 'Juniper Networks', '142971': 'NEMOA ELECTRONICS (HK) CO. 
LTD', '3C6A9D': 'Dexatek Technology LTD.', '78E980': 'RainUs Co.,Ltd', 'E0FFF7': 'Softiron Inc.', '349E34': 'Evervictory Electronic Co.Ltd', 'BC74D7': 'HangZhou JuRu Technology CO.,LTD', 'D89341': 'General Electric Global Research', '78EB14': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '3C4937': 'ASSMANN Electronic GmbH', '844464': 'ServerU Inc', '003560': 'Rosen Aviation', 'F8BC41': 'Rosslare Enterprises Limited', '700FC7': 'SHENZHEN IKINLOOP TECHNOLOGY CO.,LTD.', '3CB792': 'Hitachi Maxell, Ltd., Optronics Division', '28D98A': 'Hangzhou Konke Technology Co.,Ltd.', 'D88039': 'Microchip Technology Inc.', '1C9ECB': 'Beijing Nari Smartchip Microelectronics Company Limited', 'D48DD9': 'Meld Technology, Inc', 'DCC622': 'BUHEUNG SYSTEM', '70FF5C': 'Cheerzing Communication(Xiamen)Technology Co.,Ltd', '8870EF': 'SC Professional Trading Co., Ltd.', '902CC7': 'C-MAX Asia Limited', '94C038': 'Tallac Networks', '6836B5': 'DriveScale, Inc.', 'F8B2F3': 'GUANGZHOU BOSMA TECHNOLOGY CO.,LTD', '5C966A': 'RTNET', 'BCBC46': 'SKS Welding Systems GmbH', 'C40880': 'Shenzhen UTEPO Tech Co., Ltd.', '90179B': 'Nanomegas', '14F893': 'Wuhan FiberHome Digital Technology Co.,Ltd.', '582136': 'KMB systems, s.r.o.', '800902': 'Keysight Technologies, Inc.', '0499E6': 'Shenzhen Yoostar Technology Co., Ltd', '205CFA': 'Yangzhou ChangLian Network Technology Co,ltd.', '9816EC': 'IC Intracom', 'D062A0': 'China Essence Technology (Zhumadian) Co., Ltd.', '4C48DA': 'Beijing Autelan Technology Co.,Ltd', '84930C': 'InCoax Networks Europe AB', '1CA2B1': 'ruwido austria gmbh', '384B76': 'AIRTAME ApS', '34B7FD': 'Guangzhou Younghead Electronic Technology Co.,Ltd', '34F6D2': 'Panasonic Taiwan Co.,Ltd.', '307512': 'Sony Mobile Communications Inc', 'D48F33': 'Microsoft Corporation', 'B47C29': 'Shenzhen Guzidi Technology Co.,Ltd', '489D18': 'Flashbay Limited', '38B1DB': 'Hon Hai Precision Ind. Co.,Ltd.', 'B41780': 'DTI Group Ltd', '90203A': 'BYD Precision Manufacture Co.,Ltd', '64002D': 'Powerlinq Co., LTD', '54F876': 'ABB AG', '4CBB58': 'Chicony Electronics Co., Ltd.', 'A41242': 'NEC Platforms, Ltd.', 'C40006': 'Lipi Data Systems Ltd.', '38F33F': 'TATSUNO CORPORATION', 'D80CCF': 'C.G.V. S.A.S.', '20A99B': 'Microsoft Corporation', '604826': 'Newbridge Technologies Int. 
Ltd.', '80EACA': 'Dialog Semiconductor Hellas SA', 'D00AAB': 'Yokogawa Digital Computer Corporation', '38262B': 'UTran Technology', 'FC790B': 'Hitachi High Technologies America, Inc.', '480C49': 'NAKAYO Inc', '3CD9CE': 'Eclipse WiFi', '6077E2': 'Samsung Electronics Co.,Ltd', 'FC1910': 'Samsung Electronics Co.,Ltd', '6081F9': 'Helium Systems, Inc', '98F170': 'Murata Manufacturing Co., Ltd.', '04C991': 'Phistek INC.', '3CA10D': 'Samsung Electronics Co.,Ltd', '646CB2': 'Samsung Electronics Co.,Ltd', 'C8E42F': 'Technical Research Design and Development', 'C4C9EC': 'GugaooHK Limited', '8401A7': 'Greyware Automation Products, Inc', '680571': 'Samsung Electronics Co.,Ltd', '686E48': 'Prophet Electronic Technology Corp.,Ltd', '14B484': 'Samsung Electronics Co.,Ltd', 'F4C447': 'Coagent International Enterprise Limited', 'A43D78': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'A0FC6E': 'Telegrafia a.s.', '2053CA': 'Risk Technology Ltd', 'A49F85': 'Lyve Minds, Inc', '04572F': 'Sertel Electronics UK Ltd', 'D8977C': 'Grey Innovation', 'BC8D0E': 'Nokia', '78923E': 'Nokia Corporation', '6C14F7': 'Erhardt+Leimer GmbH', 'B0D59D': 'Shenzhen Zowee Technology Co., Ltd', '6828BA': 'Dejai', '50C7BF': 'TP-LINK TECHNOLOGIES CO.,LTD.', '209AE9': 'Volacomm Co., Ltd', '345D10': 'Wytek', 'A481EE': 'Nokia Corporation', '78D66F': 'Aristocrat Technologies Australia Pty. Ltd.', '441E91': 'ARVIDA Intelligent Electronics Technology Co.,Ltd.', '34E42A': 'Automatic Bar Controls Inc.', '3059B7': 'Microsoft', 'C4626B': 'ZPT Vigantice', '205A00': 'Coval', 'A8A668': 'zte corporation', 'ACA9A0': 'Audioengine, Ltd.', '20A787': 'Bointec Taiwan Corporation Limited', 'A8BD3A': 'UNIONMAN TECHNOLOGY CO.,LTD', 'A824EB': 'ZAO NPO Introtest', '3C89A6': 'KAPELSE', 'A46CC1': 'LTi REEnergy GmbH', 'C40E45': 'ACK Networks,Inc.', 'EC1766': 'Research Centre Module', 'A0D12A': 'AXPRO Technology Inc.', 'B05706': 'Vallox Oy', '48EE07': 'Silver Palm Technologies LLC', 'A8B9B3': 'ESSYS', '6C09D6': 'Digiquest Electronics LTD', '481842': 'Shanghai Winaas Co. Equipment Co. Ltd.', '447098': 'MING HONG TECHNOLOGY (SHEN ZHEN) LIMITED', '00EEBD': 'HTC Corporation', '2C534A': 'Shenzhen Winyao Electronic Limited', 'A4BBAF': 'Lime Instruments', 'D4319D': 'Sinwatec', 'B068B6': 'Hangzhou OYE Technology Co. Ltd', 'E44C6C': 'Shenzhen Guo Wei Electronic Co,. 
Ltd.', 'E0DB88': 'Open Standard Digital-IF Interface for SATCOM Systems', '282246': 'Beijing Sinoix Communication Co., LTD', '9C65F9': 'AcSiP Technology Corp.', '487604': 'Private', '9CBD9D': 'SkyDisk, Inc.', '74C621': 'Zhejiang Hite Renewable Energy Co.,LTD', 'A8574E': 'TP-LINK TECHNOLOGIES CO.,LTD.', '38F708': 'National Resource Management, Inc.', 'F490CA': 'Tensorcom', 'B898F7': 'Gionee Communication Equipment Co,Ltd.ShenZhen', '848336': 'Newrun', '7071B3': 'Brain Corporation', '64E625': 'Woxu Wireless Co., Ltd', 'B87AC9': 'Siemens Ltd.', 'F06130': 'Advantage Pharmacy Services, LLC', '44C56F': 'NGN Easy Satfinder (Tianjin) Electronic Co., Ltd', '5C2AEF': 'Open Access Pty Ltd', '44C4A9': 'Opticom Communication, LLC', '6C3C53': 'SoundHawk Corp', 'C4824E': 'Changzhou Uchip Electronics Co., LTD.', 'B843E4': 'Vlatacom', '48B977': 'PulseOn Oy', 'B07908': 'Cummings Engineering', 'E47723': 'zte corporation', '9CA9E4': 'zte corporation', '90F3B7': 'Kirisun Communications Co., Ltd.', '10B713': 'Private', '100E7E': 'Juniper Networks', '208986': 'zte corporation', '182012': 'Aztech Associates Inc.', 'B8266C': 'ANOV France', '3C300C': 'Dewar Electronics Pty Ltd', '98FFD0': 'Lenovo Mobile Communication Technology Ltd.', 'A875E2': 'Aventura Technologies, Inc.', '38BF2F': 'Espec Corp.', 'A47760': 'Nokia Corporation', 'C85663': 'Sunflex Europe GmbH', '88FED6': 'ShangHai WangYong Software Co., Ltd.', '7C72E4': 'Unikey Technologies', '7C2048': 'KoamTac', '8CB7F7': 'Shenzhen UniStrong Science & Technology Co., Ltd', '084027': 'Gridstore Inc.', '18AA45': 'Fon Technology', '94B9B4': 'Aptos Technology', 'B4527E': 'Sony Mobile Communications Inc', '1C63B7': 'OpenProducts 237 AB', '7060DE': 'LaVision GmbH', '502E5C': 'HTC Corporation', 'FCFE77': 'Hitachi Reftechno, Inc.', 'DC3EF8': 'Nokia Corporation', 'A49F89': 'Shanghai Rui Rui Communication Technology Co.Ltd.', '50C271': 'SECURETECH INC', '902083': 'General Engine Management Systems Ltd.', '14B126': 'Industrial Software Co', 'D850E6': 'ASUSTek COMPUTER INC.', '407A80': 'Nokia Corporation', '644214': 'Swisscom Energy Solutions AG', '0CA694': 'Sunitec Enterprise Co.,Ltd', '184462': 'Riava Networks, Inc.', '38B74D': 'Fijowave Limited', '180C14': 'iSonea Limited', 'C4E92F': 'AB Sciex', 'A88D7B': 'SunDroid Global limited.', '3CF748': 'Shenzhen Linsn Technology Development Co.,Ltd', '6C15F9': 'Nautronix Limited', '70533F': 'Alfa Instrumentos Eletronicos Ltda.', '643F5F': 'Exablaze', '9CE7BD': 'Winduskorea co., Ltd', '3842A6': 'Ingenieurbuero Stahlkopf', 'C03580': 'A&R TECH', 'D08A55': 'Skullcandy', '344F3F': 'IO-Power Technology Co., Ltd.', '2C553C': 'Gainspeed, Inc.', '248000': 'Westcontrol AS', '1C4BB9': 'SMG ENTERPRISE, LLC', '346178': 'The Boeing Company', '1446E4': 'AVISTEL', 'D095C7': 'Pantech Co., Ltd.', 'D02C45': 'littleBits Electronics, Inc.', '146080': 'zte corporation', '9CBB98': 'Shen Zhen RND Electronic Co.,LTD', 'B4A82B': 'Histar Digital Electronics Co., Ltd.', '284D92': 'Luminator', 'CC720F': 'Viscount Systems Inc.', '742B62': 'FUJITSU LIMITED', '044F8B': 'Adapteva, Inc.', 'F45F69': 'Matsufu Electronics distribution Company', '28A1EB': 'ETEK TECHNOLOGY (SHENZHEN) CO.,LTD', 'B8F828': 'Changshu Gaoshida Optoelectronic Technology Co. 
Ltd.', '3C1A57': 'Cardiopulmonary Corp', '541B5D': 'Techno-Innov', '205721': 'Salix Technology CO., Ltd.', '883612': 'SRC Computers, LLC', '083571': 'CASwell INC.', '9876B6': 'Adafruit', '503CC4': 'Lenovo Mobile Communication Technology Ltd.', '8C2F39': 'IBA Dosimetry GmbH', 'C0A0BB': 'D-Link International', '2CCD69': 'Aqavi.com', '2C7B84': 'OOO Petr Telegin', 'A4C0C7': 'ShenZhen Hitom Communication Technology Co..LTD', '78FE41': 'Socus networks', '940BD5': 'Himax Technologies, Inc', '789F4C': 'HOERBIGER Elektronik GmbH', '54FB58': 'WISEWARE, Lda', '18104E': 'CEDINT-UPM', 'E0D1E6': 'Aliph dba Jawbone', 'D82D9B': 'Shenzhen G.Credit Communication Technology Co., Ltd', '709BFC': 'Bryton Inc.', 'ACE42E': 'SK hynix', 'F4CD90': 'Vispiron Rotec GmbH', '806C8B': 'KAESER KOMPRESSOREN AG', '043D98': 'ChongQing QingJia Electronics CO.,LTD', 'E03E4A': 'Cavanagh Group International', '041B94': 'Host Mobility AB', 'A0CEC8': 'CE LINK LIMITED', '907A28': 'Beijing Morncloud Information And Technology Co. Ltd.', 'DC6F00': 'Livescribe, Inc.', '54E3B0': 'JVL Industri Elektronik', '1001CA': 'Ashley Butterworth', '246AAB': 'IT-IS International', '306112': 'PAV GmbH', 'FC4BBC': 'Sunplus Technology Co., Ltd.', '9C1465': 'Edata Elektronik San. ve Tic. A.Ş.', '4C55CC': 'Zentri Pty Ltd', '00C5DB': 'Datatech Sistemas Digitales Avanzados SL', '8CF945': 'Power Automation pte Ltd', 'F842FB': 'Yasuda Joho Co.,ltd.', 'C0DA74': 'Hangzhou Sunyard Technology Co., Ltd.', '882364': 'Watchnet DVR Inc', '887398': 'K2E Tekpoint', '2C922C': 'Kishu Giken Kogyou Company Ltd,.', 'D8FEE3': 'D-Link International', '58F387': 'HCCP', '3C977E': 'IPS Technology Limited', 'A4FB8D': 'Hangzhou Dunchong Technology Co.Ltd', '581CBD': 'Affinegy', '284FCE': 'Liaoning Wontel Science and Technology Development Co.,Ltd.', '048D38': 'Netcore Technology Inc.', '107A86': 'U&U ENGINEERING INC.', '40BD9E': 'Physio-Control, Inc', '6C5779': 'Aclima, Inc.', '3065EC': 'Wistron (ChongQing)', '50A0BF': 'Alba Fiber Systems Inc.', 'B836D8': 'Videoswitch', '542CEA': 'PROTECTRON', '804B20': 'Ventilation Control', '287994': 'Realplay Digital Technology(Shenzhen) Co.,Ltd', '18D6CF': 'Kurth Electronic GmbH', 'F48139': 'CANON INC.', '1836FC': 'Elecsys International Corporation', 'A4D094': 'Erwin Peters Systemtechnik GmbH', '604A1C': 'SUYIN Corporation', '082719': 'APS systems/electronic AG', '505AC6': 'GUANGDONG SUPER TELECOM CO.,LTD.', '2C9464': 'Cincoze Co., Ltd.', '9C79AC': 'Suntec Software(Shanghai) Co., Ltd.', '4C9614': 'Juniper Networks', 'B863BC': 'ROBOTIS, Co, Ltd', '980D2E': 'HTC Corporation', 'D464F7': 'CHENGDU USEE DIGITAL TECHNOLOGY CO., LTD', '94B8C5': 'RuggedCom Inc.', 'C419EC': 'Qualisys AB', 'DC825B': 'JANUS, spol. s r.o.', '9CA577': 'Osorno Enterprises Inc.', 'C04301': 'Epec Oy', 'E07C62': 'Whistle Labs, Inc.', '5C8486': 'Brightsource Industries Israel LTD', '50CD32': 'NanJing Chaoran Science & Technology Co.,Ltd.', 'BCBAE1': 'AREC Inc.', '18FA6F': 'ISC applied systems corp', 'A01C05': 'NIMAX TELECOM CO.,LTD.', '60E00E': 'SHINSEI ELECTRONICS CO LTD', '545414': 'Digital RF Corea, Inc', '24EB65': 'SAET I.S. S.r.l.', 'D0F27F': 'SteadyServ Technoligies, LLC', 'E894F6': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F07F0C': 'Leopold Kostal GmbH &Co. KG', '4C6255': 'SANMINA-SCI SYSTEM DE MEXICO S.A. 
DE C.V.', '188410': 'CoreTrust Inc.', 'FC229C': 'Han Kyung I Net Co.,Ltd.', '1832A2': 'LAON TECHNOLOGY CO., LTD.', 'DC2BCA': 'Zera GmbH', '9498A2': 'Shanghai LISTEN TECH.LTD', '74D02B': 'ASUSTek COMPUTER INC.', '601E02': 'EltexAlatau', 'E0C6B3': 'MilDef AB', '6472D8': 'GooWi Technology Co.,Limited', '60601F': 'SZ DJI TECHNOLOGY CO.,LTD', 'DC1DD4': 'Microstep-MIS spol. s r.o.', 'FCDD55': 'Shenzhen WeWins wireless Co.,Ltd', 'B01743': 'EDISON GLOBAL CIRCUITS LLC', 'D0BE2C': 'CNSLink Co., Ltd.', '40516C': 'Grandex International Corporation', 'C0885B': 'SnD Tech Co., Ltd.', '3CFB96': 'Emcraft Systems LLC', '846223': 'Shenzhen Coship Electronics Co., Ltd.', '1CFA68': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'D4A499': 'InView Technology Corporation', '08482C': 'Raycore Taiwan Co., LTD.', 'E0EDC7': 'Shenzhen Friendcom Technology Development Co., Ltd', '081F3F': 'WondaLink Inc.', 'B4DD15': 'ControlThings Oy Ab', 'C0B8B1': 'BitBox Ltd', 'F82EDB': 'RTW GmbH & Co. KG', 'C04A00': 'TP-LINK TECHNOLOGIES CO.,LTD.', '105F06': 'Actiontec Electronics, Inc', '087999': 'AIM GmbH', 'C011A6': 'Fort-Telecom ltd.', '94C962': 'Teseq AG', 'DC2A14': 'Shanghai Longjing Technology Co.', '045FA7': 'Shenzhen Yichen Technology Development Co.,LTD', '1853E0': 'Hanyang Digitech Co.Ltd', '00C14F': 'DDL Co,.ltd.', '2C26C5': 'zte corporation', 'F0219D': 'Cal-Comp Electronics & Communications Company Ltd.', '181725': 'Cameo Communications, Inc.', '8462A6': 'EuroCB (Phils), Inc.', '807B1E': 'Corsair Memory, Inc.', 'B4AB2C': 'MtM Technology Corporation', '74372F': 'Tongfang Shenzhen Cloudcomputing Technology Co.,Ltd', 'E4F365': 'Time-O-Matic, Inc.', 'DCC0DB': 'Shenzhen Kaiboer Technology Co., Ltd.', 'AC5D10': 'Pace Americas', 'FC626E': 'Beijing MDC Telecom', '6886E7': 'Orbotix, Inc.', 'C05E6F': 'V. Stonkaus firma Kodinis Raktas', '88F490': 'Jetmobile Pte Ltd', '1C9179': 'Integrated System Technologies Ltd', '38F597': 'home2net GmbH', '808B5C': 'Shenzhen Runhuicheng Technology Co., Ltd', 'D819CE': 'Telesquare', 'E0CEC3': 'ASKEY COMPUTER CORP', '84C8B1': 'Incognito Software Systems Inc.', '30D357': 'Logosol, Inc.', 'BC39A6': 'CSUN System Technology Co.,LTD', '1C5A6B': 'Philips Electronics Nederland BV', 'A875D6': 'FreeTek International Co., Ltd.', '10A743': 'SK Mtek Limited', '547FA8': 'TELCO systems, s.r.o.', '5474E6': 'Webtech Wireless', 'C46DF1': 'DataGravity', '304449': 'PLATH GmbH', 'ECB541': 'SHINANO E and E Co.Ltd.', 'E4C146': 'Objetivos y Servicios de Valor A', 'E8A364': 'Signal Path International / Peachtree Audio', 'BC51FE': 'Swann communications Pty Ltd', 'D40057': 'MC Technologies GmbH', '34FA40': 'Guangzhou Robustel Technologies Co., Limited', 'A0BAB8': 'Pixon Imaging', '74E424': 'APISTE CORPORATION', 'D410CF': 'Huanshun Network Science and Technology Co., Ltd.', '6CB311': 'Shenzhen Lianrui Electronics Co.,Ltd', '2411D0': 'Chongqing Ehs Science and Technology Development Co.,Ltd.', 'CC593E': 'TOUMAZ LTD', 'ECE915': 'STI Ltd', '80D733': 'QSR Automations, Inc.', '303D08': 'GLINTT TES S.A.', 'A81FAF': 'KRYPTON POLSKA', '08E5DA': 'NANJING FUJITSU COMPUTER PRODUCTS CO.,LTD. 
', 'B461FF': 'Lumigon A/S', 'A0A130': 'DLI Taiwan Branch office', '30215B': 'Shenzhen Ostar Display Electronic Co.,Ltd', '600F77': 'SilverPlus, Inc', '94FD2E': 'Shanghai Uniscope Technologies Co.,Ltd', 'ACE64B': 'Shenzhen Baojia Battery Technology Co., Ltd.', '5884E4': 'IP500 Alliance e.V.', '789F87': 'Siemens AG I IA PP PRM', '5CE0F6': 'NIC.br- Nucleo de Informacao e Coordenacao do Ponto BR', 'C83D97': 'Nokia Corporation', '0C93FB': 'BNS Solutions', 'E44F5F': 'EDS Elektronik Destek San.Tic.Ltd.Sti', 'E86D54': 'Digit Mobile Inc', '802FDE': 'Zurich Instruments AG', '5C38E0': 'Shanghai Super Electronics Technology Co.,LTD', '0CF361': 'Java Information', 'B0358D': 'Nokia Corporation', 'D0738E': 'DONG OH PRECISION CO., LTD. ', '64C944': 'LARK Technologies, Inc', 'F8E4FB': 'Actiontec Electronics, Inc', '8C4AEE': 'GIGA TMS INC', '34C99D': 'EIDOLON COMMUNICATIONS TECHNOLOGY CO. LTD.', 'E8718D': 'Elsys Equipamentos Eletronicos Ltda', '044BFF': 'GuangZhou Hedy Digital Technology Co., Ltd', '08AF78': 'Totus Solutions, Inc.', 'C8C791': 'Zero1.tv GmbH', 'ECD925': 'RAMI', '90B11C': 'Dell Inc.', '601929': 'VOLTRONIC POWER TECHNOLOGY(SHENZHEN) CORP.', '48B253': 'Marketaxess Corporation', '305D38': 'Beissbarth ', '005D03': 'Xilinx, Inc', 'D04CC1': 'SINTRONES Technology Corp.', 'C4DA26': 'NOBLEX SA', '7CC8AB': 'Acro Associates, Inc.', '1CC316': 'MileSight Technology Co., Ltd.', '1C9492': 'RUAG Schweiz AG', 'B889CA': 'ILJIN ELECTRIC Co., Ltd.', '64F50E': 'Kinion Technology Company Limited', '6815D3': 'Zaklady Elektroniki i Mechaniki Precyzyjnej R&G S.A.', '10F3DB': 'Gridco Systems, Inc.', '503F56': 'Syncmold Enterprise Corp', '8CEEC6': 'Precepscion Pty. Ltd.', '74943D': 'AgJunction', '101248': 'ITG, Inc.', 'F0D3E7': 'Sensometrix SA', 'B01266': 'Futaba-Kikaku', '7CC8D0': 'TIANJIN YAAN TECHNOLOGY CO., LTD.', '88E917': 'Tamaggo', '909DE0': 'Newland Design + Assoc. Inc.', 'D8AF3B': 'Hangzhou Bigbright Integrated communications system Co.,Ltd', '6032F0': 'Mplus technology', '2829D9': 'GlobalBeiMing technology (Beijing)Co. Ltd', '88615A': 'Siano Mobile Silicon Ltd.', '70E24C': 'SAE IT-systems GmbH & Co. KG', '68B43A': 'WaterFurnace International, Inc.', '4C7897': 'Arrowhead Alarm Products Ltd', '44E8A5': 'Myreka Technologies Sdn. Bhd.', 'B482C5': 'Relay2, Inc.', '00FD4C': 'NEVATEC', '144319': 'Creative&Link Technology Limited', '58ECE1': 'Newport Corporation', '4C09B4': 'zte corporation', '58CF4B': 'Lufkin Industries', '985E1B': 'ConversDigital Co., Ltd.', '60D1AA': 'Vishal Telecommunications Pvt Ltd', '60D2B9': 'REALAND BIO CO., LTD.', '30FD11': 'MACROTECH (USA) INC.', 'D8AFF1': 'Panasonic Appliances Company', '2C6289': 'Regenersis (Glenrothes) Ltd', '1848D8': 'Fastback Networks', '709BA5': 'Shenzhen Y&D Electronics Co.,LTD.', 'F45433': 'Rockwell Automation', 'B48910': 'Coster T.E. S.P.A.', '3CF392': 'Virtualtek. Co. Ltd', '149FE8': 'Lenovo Mobile Communication Technology Ltd.', 'BCD940': 'ASR Co,.Ltd.', 'C455C2': 'Bach-Simpson', '00E8AB': 'Meggitt Training Systems, Inc.', 'B4218A': 'Dog Hunter LLC', '049C62': 'BMT Medical Technology s.r.o.', '0C2A69': 'electric imp, incorporated', 'B0C83F': 'Jiangsu Cynray IOT Co., Ltd.', '388AB7': 'ITC Networks', 'D48CB5': 'Cisco Systems, Inc', 'BCC23A': 'Thomson Video Networks', 'ACC2EC': "CLT INT'L IND. CORP.", 'B0750C': 'QA Cafe', 'B4E1EB': 'Private', 'A865B2': 'DONGGUAN YISHANG ELECTRONIC TECHNOLOGY CO., LIMITED', 'E8D0FA': 'MKS Instruments Deutschland GmbH', '98262A': 'Applied Research Associates, Inc', '3C9174': 'ALONG COMMUNICATION TECHNOLOGY', 'FC5090': 'SIMEX Sp. 
z o.o.', '60B982': 'RO.VE.R. Laboratories S.p.A.', 'ECD19A': 'Zhuhai Liming Industries Co., Ltd', '348137': 'UNICARD SA', 'DC37D2': 'Hunan HKT Electronic Technology Co., Ltd', '549D85': 'EnerAccess inc', '407074': 'Life Technology (China) Co., Ltd', '1CF4CA': 'Private', 'B4A4B5': 'Zen Eye Co.,Ltd', '20F002': 'MTData Developments Pty. Ltd.', '38B12D': 'Sonotronic Nagel GmbH', 'ACEE3B': '6harmonics Inc', 'B46238': 'Exablox', 'C8BBD3': 'Embrane', '3C363D': 'Nokia Corporation', '808698': 'Netronics Technologies Inc.', '1C6BCA': 'Mitsunami Co., Ltd.', '642400': 'Xorcom Ltd.', 'E83EFB': 'GEODESIC LTD.', '9CE10E': 'NCTech Ltd', 'A06D09': 'Intelcan Technosystems Inc.', '60F3DA': 'Logic Way GmbH', '2C750F': 'Shanghai Dongzhou-Lawton Communication Technology Co. Ltd.', '5C5015': 'Cisco Systems, Inc', 'F8DB4C': 'PNY Technologies, INC.', '0C9D56': 'Consort Controls Ltd', '980284': 'Theobroma Systems GmbH', '1CD40C': 'Kriwan Industrie-Elektronik GmbH', 'A4B980': 'Parking BOXX Inc.', 'A47C14': 'ChargeStorm AB', '30B216': 'ABB AG - Power Grids - Grid Automation', '8020AF': 'Trade FIDES, a.s.', '3CB87A': 'Private', 'AC1461': 'ATAW Co., Ltd.', 'E4C6E6': 'Mophie, LLC', '502D1D': 'Nokia Corporation', 'F48E09': 'Nokia Corporation', '5848C0': 'COFLEC', '587FC8': 'S2M', '200505': 'RADMAX COMMUNICATION PRIVATE LIMITED', '002D76': 'TITECH GmbH', '8C604F': 'Cisco Systems, Inc', '3C9F81': 'Shenzhen CATIC Bit Communications Technology Co.,Ltd', '18B591': 'I-Storm', '445F7A': 'Shihlin Electric & Engineering Corp.', '141A51': 'Treetech Sistemas Digitais', '088F2C': 'Hills Sound Vision & Lighting', '441319': 'WKK TECHNOLOGY LTD.', 'C035BD': 'Velocytech Aps', '287184': 'Spire Payments', '7CB03E': 'OSRAM GmbH', 'BC8B55': 'NPP ELIKS America Inc. DBA T&M Atlantic', '50ED94': 'EGATEL SL', '48A22D': 'Shenzhen Huaxuchang Telecom Technology Co.,Ltd', 'C86000': 'ASUSTek COMPUTER INC.', '8C57FD': 'LVX Western', '54E63F': 'ShenZhen LingKeWeiEr Technology Co., Ltd.', '20FABB': 'Cambridge Executive Limited', '844915': 'vArmour Networks, Inc.', 'C84544': 'Asia Pacific CIS (Wuxi) Co, Ltd', 'E0EF25': 'Lintes Technology Co., Ltd.', 'B01C91': 'Elim Co', '04F17D': 'Tarana Wireless', '2CBE97': 'Ingenieurbuero Bickele und Buehler GmbH', 'C0493D': 'MAITRISE TECHNOLOGIQUE', 'A05E6B': 'MELPER Co., Ltd.', 'D878E5': 'KUHN SA', 'D824BD': 'Cisco Systems, Inc', '28CD1C': 'Espotel Oy', 'AC0DFE': 'Ekon GmbH - myGEKKO', 'FC5B26': 'MikroBits', '40F407': 'Nintendo Co., Ltd.', '900A3A': 'PSG Plastic Service GmbH', '7041B7': 'Edwards Lifesciences LLC', 'DCA8CF': 'New Spin Golf, LLC.', 'A849A5': 'Lisantech Co., Ltd.', 'C467B5': 'Libratone A/S', '4C3910': 'Newtek Electronics co., Ltd.', '98BC57': 'SVA TECHNOLOGIES CO.LTD', 'DC3C2E': 'Manufacturing System Insights, Inc.', '903AA0': 'Nokia', 'D443A8': 'Changzhou Haojie Electric Co., Ltd.', 'BCE59F': 'WATERWORLD Technology Co.,LTD', 'F83553': 'Magenta Research Ltd.', 'F4044C': 'ValenceTech Limited', '3497FB': 'ADVANCED RF TECHNOLOGIES INC', 'F03A55': 'Omega Elektronik AS', '70A66A': 'Prox Dynamics AS', 'DC3E51': 'Solberg & Andersen AS', '843611': 'hyungseul publishing networks', '3440B5': 'IBM', 'C81AFE': 'DLOGIC GmbH', 'EC63E5': 'ePBoard Design LLC', '94DB49': 'SITCORP', '3CE5B4': 'KIDASEN INDUSTRIA E COMERCIO DE ANTENAS LTDA', '08D09F': 'Cisco Systems, Inc', 'D4D748': 'Cisco Systems, Inc', '344F69': 'EKINOPS SAS', '644BF0': 'CalDigit, Inc', 'F0620D': 'Shenzhen Egreat Tech Corp.,Ltd', 'B06CBF': '3ality Digital Systems GmbH', '2C67FB': 'ShenZhen Zhengjili Electronics Co., LTD', 'F8313E': 'endeavour GmbH', 
'143605': 'Nokia Corporation', '64ED62': 'WOORI SYSTEMS Co., Ltd', '54D0ED': 'AXIM Communications', 'B4944E': 'WeTelecom Co., Ltd.', 'E00B28': 'Inovonics', '48022A': 'B-Link Electronic Limited', '345B11': 'EVI HEAT AB', 'C87CBC': 'Valink Co., Ltd. ', 'B81413': 'Keen High Holding(HK) Ltd.', '5CC9D3': 'PALLADIUM ENERGY ELETRONICA DA AMAZONIA LTDA', '4050E0': 'Milton Security Group LLC', '70CA9B': 'Cisco Systems, Inc', '78BAD0': 'Shinybow Technology Co. Ltd.', '2C3F38': 'Cisco Systems, Inc', '2C002C': 'UNOWHY', '24E6BA': 'JSC Zavod im. Kozitsky', 'CCA374': 'Guangdong Guanglian Electronic Technology Co.Ltd', '58677F': 'Clare Controls Inc.', '68BC0C': 'Cisco Systems, Inc', '18E80F': 'Viking Electronics Inc.', 'CC6BF1': 'Sound Masking Inc.', '2C9717': 'I.C.Y. B.V.', '0C5A19': 'Axtion Sdn Bhd', 'A8BD1A': 'Honey Bee (Hong Kong) Limited', '000830': 'Cisco Systems, Inc', '9CA3BA': 'SAKURA Internet Inc.', 'C4EEAE': 'VSS Monitoring', 'F8D3A9': 'AXAN Networks', '988217': 'Disruptive Ltd', '8C8A6E': 'ESTUN AUTOMATION TECHNOLOY CO., LTD', 'A078BA': 'Pantech Co., Ltd.', '704642': 'CHYNG HONG ELECTRONIC CO., LTD.', '9C5C8D': 'FIREMAX INDÚSTRIA E COMÉRCIO DE PRODUTOS ELETRÔNICOS LTDA', 'D4206D': 'HTC Corporation', '7C1E52': 'Microsoft', 'DCB4C4': 'Microsoft XCG', 'ACCB09': 'Hefcom Metering (Pty) Ltd', '1866E3': 'Veros Systems, Inc.', '74FDA0': 'Compupal (Group) Corporation ', 'CCB8F1': 'EAGLE KINGDOM TECHNOLOGIES LIMITED', 'A429B7': 'bluesky', '48F317': 'Private', 'BC779F': 'SBM Co., Ltd.', '406AAB': 'RIM', '248707': 'SEnergy Corporation', 'CCF8F0': "Xi'an HISU Multimedia Technology Co.,Ltd.", '04888C': 'Eifelwerk Butler Systeme GmbH', '30DE86': 'Cedac Software S.r.l.', '18C451': 'Tucson Embedded Systems', 'D41C1C': 'RCF S.P.A.', 'C4C19F': 'National Oilwell Varco Instrumentation, Monitoring, and Optimization (NOV IMO)', '58920D': 'Kinetic Avionics Limited', 'AC02EF': 'Comsis', 'B8B42E': 'Gionee Communication Equipment Co,Ltd.ShenZhen', '443EB2': 'DEOTRON Co., LTD.', 'D059C3': 'CeraMicro Technology Corporation', 'D45AB2': 'Galleon Systems', '182C91': 'Concept Development, Inc.', '24EC99': 'ASKEY COMPUTER CORP', '280CB8': 'Mikrosay Yazilim ve Elektronik A.S.', 'FC1794': 'InterCreative Co., Ltd', '3CC99E': 'Huiyang Technology Co., Ltd', 'B40B7A': 'Brusa Elektronik AG', '449CB5': 'Alcomp, Inc', 'B82ADC': 'EFR Europäische Funk-Rundsteuerung GmbH', '78BEB6': 'Enhanced Vision', 'B4FC75': 'SEMA Electronics(HK) CO.,LTD', 'B0BF99': 'WIZITDONGDO', '2C1EEA': 'AERODEV', '1C8E8E': 'DB Communication & Systems Co., ltd.', '40F14C': 'ISE Europe SPRL', 'E8944C': 'Cogent Healthcare Systems Ltd', '9067F3': 'Alcatel Lucent', 'A44B15': 'Sun Cupid Technology (HK) LTD', '48C862': 'Simo Wireless,Inc.', '70B921': 'Fiberhome Telecommunication Technologies Co.,LTD', 'A0E295': 'DAT System Co.,Ltd', 'A0165C': 'Triteka LTD', '9C417C': 'Hame Technology Co., Limited ', '9C6ABE': 'QEES ApS.', '2C2172': 'Juniper Networks', '900917': 'Far-sighted mobile', '044665': 'Murata Manufacturing Co., Ltd.', 'D4F0B4': 'Napco Security Technologies', '9C934E': 'Xerox Corporation', '68F895': 'Redflow Limited', '3826CD': 'ANDTEK', 'D8973B': 'Artesyn Embedded Technologies', '2C8BF2': 'Hitachi Metals America Ltd', 'F4A52A': 'Hawa Technologies Inc', '0C6E4F': 'PrimeVOLT Co., Ltd.', 'E8B748': 'Cisco Systems, Inc', '58E476': 'CENTRON COMMUNICATIONS TECHNOLOGIES FUJIAN CO.,LTD', '447E95': 'Alpha and Omega, Inc', '986022': 'EMW Co., Ltd.', 'BC99BC': 'FonSee Technology Inc.', '24CBE7': 'MYK, Inc.', 'B0A10A': 'Pivotal Systems Corporation', '802DE1': 'Solarbridge 
Technologies', '3C672C': 'Sciovid Inc.', '18D071': 'DASAN CO., LTD.', 'B8D49D': 'M Seven System Ltd.', '88BFD5': 'Simple Audio Ltd', '781DFD': 'Jabil Inc', '18AEBB': 'Siemens Convergence Creators GmbH&Co.KG', '50FAAB': 'L-tek d.o.o.', '3891FB': 'Xenox Holding BV', 'BC5FF4': 'ASRock Incorporation', 'E8B4AE': 'Shenzhen C&D Electronics Co.,Ltd', 'D8C068': 'Netgenetech.co.,ltd.', '601199': 'Siama Systems Inc', 'A88CEE': 'MicroMade Galka i Drozdz sp.j.', 'A8E018': 'Nokia Corporation', 'CCC62B': 'Tri-Systems Corporation', '147411': 'RIM', 'F8A9DE': 'PUISSANCE PLUS', 'DC9B1E': 'Intercom, Inc.', 'DC2B66': 'InfoBLOCK S.A. de C.V.', '6C81FE': 'Mitsuba Corporation', '783F15': 'EasySYNC Ltd.', 'B8871E': 'Good Mind Industries Co., Ltd.', 'D4F027': 'Trust Power Ltd.', '0455CA': 'BriView (Xiamen) Corp.', '1435B3': 'Future Designs, Inc.', 'AC932F': 'Nokia Corporation', '0054AF': 'Continental Automotive Systems Inc.', 'ACCABA': 'Midokura Co., Ltd. ', '0C8112': 'Private', '9C95F8': 'SmartDoor Systems, LLC', 'C027B9': 'Beijing National Railway Research & Design Institute of Signal & Communication Co., Ltd.', 'B0BDA1': 'ZAKLAD ELEKTRONICZNY SIMS', '64D1A3': 'Sitecom Europe BV', 'F43E9D': 'Benu Networks, Inc.', '7819F7': 'Juniper Networks', '64094C': 'Beijing Superbee Wireless Technology Co.,Ltd', '7C7D41': 'Jinmuyu Electronics Co., Ltd.', '4C1480': 'NOREGON SYSTEMS, INC', '0CF3EE': 'EM Microelectronic', '70B265': 'Hiltron s.r.l.', '04E2F8': 'AEP Ticketing solutions srl', 'EC9ECD': 'Artesyn Embedded Technologies', '8C5105': 'Shenzhen ireadygo Information Technology CO.,LTD.', 'C8208E': 'Storagedata', '5C5EAB': 'Juniper Networks', '28E297': 'Shanghai InfoTM Microelectronics Co.,Ltd.', '34B571': 'PLDS', '3C7437': 'RIM', 'EC9233': 'Eddyfi NDT Inc', '2872F0': 'ATHENA', '1C19DE': 'eyevis GmbH', '9C807D': 'SYSCABLE Korea Inc.', '743889': 'ANNAX Anzeigesysteme GmbH', '44D2CA': 'Anvia TV Oy', '386E21': 'Wasion Group Ltd.', '609E64': 'Vivonic GmbH', 'BC15A6': 'Taiwan Jantek Electronics,Ltd.', 'DCDECA': 'Akyllor', 'A4856B': 'Q Electronics Ltd', '20D5AB': 'Korea Infocom Co.,Ltd.', 'A0AAFD': 'EraThink Technologies Corp.', '6CA906': 'Telefield Ltd', '78223D': 'Affirmed Networks', '3C02B1': 'Creation Technologies LP', 'E441E6': 'Ottec Technology GmbH', 'BC71C1': 'XTrillion, Inc.', 'E0E8E8': 'Olive Telecommunication Pvt. Ltd', '6052D0': 'FACTS Engineering ', 'B08991': 'LGE ', '30142D': 'Piciorgros GmbH', '50AF73': 'Shenzhen Bitland Information Technology Co., Ltd.', '5C9AD8': 'FUJITSU LIMITED', 'A4C0E1': 'Nintendo Co., Ltd.', '4C3B74': 'VOGTEC(H.K.) Co., Ltd', '684352': 'Bhuu Limited', 'ECE90B': 'SISTEMA SOLUCOES ELETRONICAS LTDA - EASYTECH', 'A08C9B': 'Xtreme Technologies Corp', 'A83944': 'Actiontec Electronics, Inc', '74E06E': 'Ergophone GmbH', '0CF0B4': 'Globalsat International Technology Ltd', '48DF1C': 'Wuhan NEC Fibre Optic Communications industry Co. Ltd', 'F8F014': 'RackWare Inc.', '2826A6': 'PBR electronics GmbH', 'B428F1': 'E-Prime Co., Ltd.', 'C01242': 'Alpha Security Products', 'BC20BA': 'Inspur (Shandong) Electronic Information Co., Ltd', '1CFEA7': 'IDentytech Solutins Ltd.', '304EC3': 'Tianjin Techua Technology Co., Ltd.', 'B4CFDB': 'Shenzhen Jiuzhou Electric Co.,LTD', 'FCD4F2': 'The Coca Cola Company', '5C6A7D': 'KENTKART EGE ELEKTRONIK SAN. VE TIC. LTD. STI. 
', 'AC2FA8': 'Humannix Co.,Ltd.', '1064E2': 'ADFweb.com s.r.l.', 'CC34D7': 'GEWISS S.P.A.', 'F02A61': 'Waldo Networks, Inc.', 'C8A70A': 'Verizon Business', '60DA23': 'Estech Co.,Ltd', '44DCCB': 'SEMINDIA SYSTEMS PVT LTD', 'D49C8E': 'University of FUKUI', 'A0DE05': 'JSC Irbis-T', '0817F4': 'IBM Corp', 'CCD811': 'Aiconn Technology Corporation', '90610C': 'Fida International (S) Pte Ltd', '3C5F01': 'Synerchip Co., Ltd.', 'ECBBAE': 'Digivoice Tecnologia em Eletronica Ltda', '34A183': 'AWare, Inc', '9873C4': 'Sage Electronic Engineering LLC', 'B40142': 'GCI Science & Technology Co.,LTD', '740ABC': 'LightwaveRF Technology Ltd', 'AC8112': 'Gemtek Technology Co., Ltd.', '686359': 'Advanced Digital Broadcast SA', '28061E': 'NINGBO GLOBAL USEFUL ELECTRIC CO.,LTD', '64E8E6': 'global moisture management system', 'E0D10A': 'Katoudenkikougyousyo co ltd', 'C44B44': 'Omniprint Inc.', 'F43814': 'Shanghai Howell Electronic Co.,Ltd', '44599F': 'Criticare Systems, Inc', '3C2F3A': 'SFORZATO Corp.', '74CE56': 'Packet Force Technology Limited Company', '10A13B': 'FUJIKURA RUBBER LTD.', 'F4E142': 'Delta Elektronika BV', '18922C': 'Virtual Instruments', 'A49B13': 'Digital Check', 'C8EE08': 'TANGTOP TECHNOLOGY CO.,LTD', '7472F2': 'Chipsip Technology Co., Ltd.', 'E0A670': 'Nokia Corporation', '48C8B6': 'SysTec GmbH', '3C6278': 'SHENZHEN JETNET TECHNOLOGY CO.,LTD.', 'D43D67': 'Carma Industries Inc.', 'C8D5FE': 'Shenzhen Zowee Technology Co., Ltd', '00BD27': 'Exar Corp.', '5C4058': 'Jefferson Audio Video Systems, Inc.', '58D08F': 'IEEE 1904.1 Working Group', '6C9CE9': 'Nimble Storage', 'CC09C8': 'IMAQLIQ LTD', '9C4563': 'DIMEP Sistemas', '2C3068': 'Pantech Co.,Ltd', '58DB8D': 'Fast Co., Ltd.', 'E446BD': 'C&C TECHNIC TAIWAN CO., LTD.', '8CDD8D': 'Wifly-City System Inc.', '20A2E7': 'Lee-Dickens Ltd', 'FCEDB9': 'Arrayent', '44ED57': 'Longicorn, inc.', 'EC98C1': 'Beijing Risbo Network Technology Co.,Ltd', '7CB542': 'ACES Technology', '905446': 'TES ELECTRONIC SOLUTIONS', '544A05': 'wenglor sensoric gmbh', 'F4DCDA': 'Zhuhai Jiahe Communication Technology Co., limited', 'E80462': 'Cisco Systems, Inc', 'DCD0F7': 'Bentek Systems Ltd.', '6854F5': 'enLighted Inc', 'D4A928': 'GreenWave Reality Inc', 'C46354': 'U-Raku, Inc.', '405FBE': 'RIM', 'E06290': 'Jinan Jovision Science & Technology Co., Ltd.', '100E2B': 'NEC CASIO Mobile Communications', '70E139': '3view Ltd', '18422F': 'Alcatel Lucent', '38A95F': 'Actifio Inc', '98E165': 'Accutome', '785712': 'Mobile Integration Workgroup', '380A0A': 'Sky-City Communication and Electronics Limited Company', '0CD696': 'Amimon Ltd', 'F4DC4D': 'Beijing CCD Digital Technology Co., Ltd', '4013D9': 'Global ES', '4083DE': 'Zebra Technologies Inc', '8897DF': 'Entrypass Corporation Sdn. Bhd.', '24AF54': 'NEXGEN Mediatech Inc.', 'F0F842': 'KEEBOX, Inc.', 'DC4EDE': 'SHINYEI TECHNOLOGY CO., LTD.', 'AC4FFC': 'SVS-VISTEK GmbH', 'B43741': 'Consert, Inc.', '94857A': 'Evantage Industries Corp', 'E087B1': 'Nata-Info Ltd.', 'D496DF': 'SUNGJIN C&T CO.,LTD', '447C7F': 'Innolight Technology Corporation', '5C864A': 'Secret Labs LLC', 'F0AD4E': 'Globalscale Technologies, Inc.', '903D5A': 'Shenzhen Wision Technology Holding Limited', '08FAE0': 'Fohhn Audio AG', 'A04041': 'SAMWONFA Co.,Ltd.', '40406B': 'Icomera', '6C22AB': 'Ainsworth Game Technology', '7CA29B': 'D.SignT GmbH & Co. 
KG', '68CA00': 'Octopus Systems Limited', 'E0589E': 'Laerdal Medical', '3018CF': 'DEOS control systems GmbH', '58B9E1': 'Crystalfontz America, Inc.', '20D906': 'Iota, Inc.', 'F45595': 'HENGBAO Corporation LTD.', '949C55': 'Alta Data Technologies', 'D479C3': 'Cameronet GmbH & Co. KG', '0C1DC2': 'SeAH Networks', '5475D0': 'Cisco Systems, Inc', '9C4E20': 'Cisco Systems, Inc', 'D87533': 'Nokia Corporation', '4C5DCD': 'Oy Finnish Electric Vehicle Technologies Ltd', '180C77': 'Westinghouse Electric Company, LLC', '1C3A4F': 'AccuSpec Electronics, LLC', '70D57E': 'Scalar Corporation', 'B0E39D': 'CAT SYSTEM CO.,LTD.', '7C2E0D': 'Blackmagic Design', '9835B8': 'Assembled Products Corporation', '288915': 'CashGuard Sverige AB', '14A62C': 'S.M. Dezac S.A.', '547FEE': 'Cisco Systems, Inc', 'ACEA6A': 'GENIX INFOCOMM CO., LTD.', 'A8F470': 'Fujian Newland Communication Science Technologies Co.,Ltd.', '6089B7': 'KAEL MÜHENDİSLİK ELEKTRONİK TİCARET SANAYİ LİMİTED ŞİRKETİ', '30525A': 'NST Co., LTD', '2CA780': 'True Technologies Inc.', '7C6F06': 'Caterpillar Trimble Control Technologies', '70D5E7': 'Wellcore Corporation', '5CE286': 'Nortel Networks', '8C640B': 'Beyond Devices d.o.o.', 'FCE192': 'Sichuan Jinwangtong Electronic Science&Technology Co,.Ltd', '601283': 'TSB REAL TIME LOCATION SYSTEMS S.L.', '98DCD9': 'UNITEC Co., Ltd.', 'C0CFA3': 'Creative Electronics & Software, Inc.', '2CA835': 'RIM', 'C41ECE': 'HMI Sources Ltd.', '10E6AE': 'Source Technologies, LLC', '6C32DE': 'Indieon Technologies Pvt. Ltd.', '94236E': 'Shenzhen Junlan Electronic Ltd', '408A9A': 'TITENG CO., Ltd.', 'F445ED': 'Portable Innovation Technology Ltd.', '3CF72A': 'Nokia Corporation', '545FA9': 'Teracom Limited', '04FE7F': 'Cisco Systems, Inc', 'EC4476': 'Cisco Systems, Inc', 'E0BC43': 'C2 Microsystems, Inc.', 'B86491': 'CK Telecom Ltd', 'DC49C9': 'CASCO SIGNAL LTD', '70D880': 'Upos System sp. z o.o.', 'A05DC1': 'TMCT Co., LTD.', '8C736E': 'FUJITSU LIMITED', '50F003': 'Open Stack, Inc.', '4012E4': 'Compass-EOS', '446C24': 'Reallin Electronic Co.,Ltd', '2046F9': 'Advanced Network Devices (dba:AND)', '487119': 'SGB GROUP LTD.', '583CC6': 'Omneality Ltd.', 'B0C8AD': 'People Power Company', '181714': 'DAEWOOIS', 'F0EC39': 'Essec', 'AC583B': 'Human Assembler, Inc.', 'E8E776': 'Shenzhen Kootion Technology Co., Ltd', 'A4B1EE': 'H. ZANDER GmbH & Co. KG', '842141': 'Shenzhen Ginwave Technologies Ltd.', '681FD8': 'Siemens Industry, Inc.', 'A0231B': 'TeleComp R&D Corp.', 'B8A3E0': 'BenRui Technology Co.,Ltd', '3CF52C': 'DSPECIALISTS GmbH', '6C1811': 'Decatur Electronics', 'F8E968': 'Egker Kft.', '5403F5': 'EBN Technology Corp.', 'A8995C': 'aizo ag', '04C05B': 'Tigo Energy', '8038FD': 'LeapFrog Enterprises, Inc.', 'ACBEB6': 'Visualedge Technology Co., Ltd.', '2C9127': 'Eintechno Corporation', '4001C6': '3COM EUROPE LTD', '9C5E73': 'Calibre UK LTD', '5C1437': 'Thyssenkrupp Aufzugswerke GmbH', '9C55B4': 'I.S.E. 
S.r.l.', 'E4751E': 'Getinge Sterilization AB', '1065A3': 'Core Brands LLC', '4C63EB': 'Application Solutions (Electronics and Vision) Ltd', '702F97': 'Aava Mobile Oy', '10CA81': 'PRECIA', '50252B': 'Nethra Imaging Incorporated', 'F8811A': 'OVERKIZ', '48343D': 'IEP GmbH', '609F9D': 'CloudSwitch', 'F0C24C': 'Zhejiang FeiYue Digital Technology Co., Ltd', 'B894D2': 'Retail Innovation HTT AB', '94BA31': 'Visiontec da Amazônia Ltda.', '9C5B96': 'NMR Corporation', '60F13D': 'JABLOCOM s.r.o.', 'B0E97E': 'Advanced Micro Peripherals', '40A6A4': 'PassivSystems Ltd', '3863F6': '3NOD MULTIMEDIA(SHENZHEN)CO.,LTD', 'D4AAFF': 'MICRO WORLD ', '78B81A': 'INTER SALES A/S', 'CCCC4E': 'Sun Fountainhead USA. Corp ', '688540': 'IGI Mobile, Inc.', 'A09A5A': 'Time Domain', '64A837': 'Juni Korea Co., Ltd', 'CC0080': 'BETTINI SRL', '644BC3': 'Shanghai WOASiS Telecommunications Ltd., Co.', 'ACD180': 'Crexendo Business Solutions, Inc.', '942E63': 'Finsécur', 'AC8317': 'Shenzhen Furtunetel Communication Co., Ltd', '002717': 'CE Digital(Zhenjiang)Co.,Ltd', '002716': 'Adachi-Syokai Co., Ltd.', '202CB7': 'Kong Yue Electronics & Information Industry (Xinhui) Ltd.', '74E537': 'RADSPIN', '0026C3': 'Insightek Corp.', '0026C0': 'EnergyHub', '0026C1': 'ARTRAY CO., LTD.', '0026BE': 'Schoonderbeek Elektronica Systemen B.V.', '0026EC': 'Legrand Home Systems, Inc', '0026E9': 'SP Corp', '0026EB': 'Advanced Spectrum Technology Co., Ltd.', '0026E1': 'Stanford University, OpenFlow Group', '0026DC': 'Optical Systems Design', '002700': 'Shenzhen Siglent Technology Co., Ltd.', '0026D4': 'IRCA SpA', '0026B5': 'ICOMM Tele Ltd', '0026A1': 'Megger', '00264C': 'Shanghai DigiVision Technology Co., Ltd.', '002644': 'Thomson Telecom Belgium', '002646': 'SHENYANG TONGFANG MULTIMEDIA TECHNOLOGY COMPANY LIMITED', '00263F': 'LIOS Technology GmbH', '002673': 'RICOH COMPANY,LTD.', '00266D': 'MobileAccess Networks', '00266F': 'Coordiwise Technology Corp.', '00266E': 'Nissho-denki Co.,LTD.', '00267F': 'Zenterio AB', '00267D': 'A-Max Technology Macao Commercial Offshore Company Limited', '00267C': 'Metz-Werke GmbH & Co KG', '002674': 'Electronic Solutions, Inc.', '002663': 'Shenzhen Huitaiwei Tech. Ltd, co.', '002661': 'Irumtek Co., Ltd.', '00265B': 'Hitron Technologies. Inc', '002656': 'Sansonic Electronics USA', '002658': 'T-Platforms (Cyprus) Limited', '002681': 'Interspiro AB', '002683': 'Ajoho Enterprise Co., Ltd.', '00268C': 'StarLeaf Ltd.', '00268B': 'Guangzhou Escene Computer Technology Limited', '00263B': 'Onbnetech', '002634': 'Infineta Systems, Inc', '002698': 'Cisco Systems, Inc', '00269B': 'SOKRAT Ltd.', '0025D4': 'General Dynamics Mission Systems', '0025CA': 'LS Research, LLC', '002605': 'CC Systems AB', '002602': 'SMART Temps LLC', '0025FB': 'Tunstall Healthcare A/S', '0025F4': 'KoCo Connector AG', '00262F': 'HAMAMATSU TOA ELECTRONICS', '002631': 'COMMTACT LTD', '00261A': 'Femtocomm System Technology Corp.', '0025E3': 'Hanshinit Inc.', '002606': 'RAUMFELD GmbH', '002607': 'Enabling Technology Pty Ltd', '0025DC': 'Sumitomo Electric Industries,Ltd', '002624': 'Thomson Inc.', '0025EF': 'I-TEC Co., Ltd.', '002598': 'Zhong Shan City Litai Electronic Industrial Co. Ltd', '002599': 'Hedon e.d. B.V.', '002597': 'Kalki Communication Technologies', '0025A7': 'itron', '0025A9': 'Shanghai Embedway Information Technologies Co.,Ltd', '00259F': 'TechnoDigital Technologies GmbH', '00259D': 'Private', '002572': 'Nemo-Q International AB', '00256B': 'ATENIX E.E. 
s.r.l.', '00256E': 'Van Breda B.V.', '002565': 'Vizimax Inc.', '002592': 'Guangzhou Shirui Electronic Co., Ltd', '002594': 'Eurodesign BG LTD', '00258A': 'Pole/Zero Corporation', '002579': 'J & F Labs', '00257F': 'CallTechSolution Co.,Ltd', '002577': 'D-BOX Technologies', '0025C2': 'RingBell Co.,Ltd.', '002584': 'Cisco Systems, Inc', '00255E': 'Shanghai Dare Technologies Co.,Ltd.', '00255F': 'SenTec AG', '0025B4': 'Cisco Systems, Inc', '0025B2': 'MBDA Deutschland GmbH', '002529': 'COMELIT GROUP S.P.A', '00252A': 'Chengdu GeeYa Technology Co.,LTD', '002528': 'Daido Signal Co., Ltd.', '002526': 'Genuine Technologies Co., Ltd.', '002521': 'Logitek Electronic Systems, Inc.', '00251F': 'ZYNUS VISION INC.', '00251E': 'ROTEL TECHNOLOGIES', '002503': 'IBM Corp', '002504': 'Valiant Communications Limited', '00253B': 'din Dietmar Nocker Facilitymanagement GmbH', '00253D': 'DRS Consolidated Controls', '002519': 'Viaas Inc', '002535': 'Minimax GmbH & Co KG', '002513': 'CXP DIGITAL BV', '0024C2': 'Asumo Co.,Ltd.', '0024BF': 'CIAT', '0024C0': 'NTI COMODO INC', '0024BB': 'CENTRAL Corporation', '0024E4': 'Withings', '0024DE': 'GLOBAL Technology Inc.', '0024A4': 'Siklu Communication', '00249A': 'Beijing Zhongchuang Telecommunication Test Co., Ltd.', '00249E': 'ADC-Elektronik GmbH', '00249F': 'RIM Testing Services', '0024ED': 'YT Elec. Co,.Ltd.', '0024EC': 'United Information Technology Co.,Ltd.', '0024E6': 'In Motion Technology Inc.', '0024E7': 'Plaster Networks', '0024AE': 'IDEMIA', '0024AD': 'Adolf Thies Gmbh & Co. KG', '0024A7': 'Advanced Video Communications Inc.', '0024AB': 'A7 Engineering, Inc.', '0024DC': 'Juniper Networks', '0024DB': 'Alcohol Monitoring Systems', '0024DD': 'Centrak, Inc.', '0024D5': 'Winward Industrial Limited', '0024BC': 'HuRob Co.,Ltd', '0024B7': 'GridPoint, Inc.', '0024EA': 'iris-GmbH infrared & intelligent sensors', '00245F': 'Vine Telecom CO.,Ltd.', '002455': 'MuLogic BV', '00245A': 'Nanjing Panda Electronics Company Limited', '00245B': 'RAIDON TECHNOLOGY, INC.', '002459': 'ABB Automation products GmbH', '00243A': 'Ludl Electronic Products', '002439': 'Digital Barriers Advanced Technologies', '002434': 'Lectrosonics, Inc.', '00244E': 'RadChips, Inc.', '002488': 'Centre For Development Of Telematics', '00248F': 'DO-MONIX', '00242F': 'Micron', '002479': 'Optec Displays, Inc.', '002466': 'Unitron nv', '002468': 'Sumavision Technologies Co.,Ltd', '00240B': 'Virtual Computer Inc.', '002402': 'Op-Tection GmbH', '0023FC': 'Ultra Stereo Labs, Inc', '0023DB': 'saxnet gmbh', '0023C8': 'TEAM-R', '0023EB': 'Cisco Systems, Inc', '0023EC': 'Algorithmix GmbH', '002412': 'Benign Technologies Co, Ltd.', '00240D': 'OnePath Networks LTD.', '002426': 'NOHMI BOSAI LTD.', '002429': 'MK MASTER INC.', '002418': 'Nextwave Semiconductor', '002391': 'Maxian', '002392': 'Proteus Industries Inc.', '002393': 'AJINEXTEK', '00238D': 'Techno Design Co., Ltd.', '0023C1': 'Securitas Direct AB', '0023BB': 'Schmitt Industries', '002387': 'ThinkFlood, Inc.', '002384': 'GGH Engineering s.r.l.', '00236F': 'DAQ System', '002369': 'Cisco-Linksys, LLC', '0023AA': 'HFR, Inc.', '0023A5': 'SageTV, LLC', '00239E': 'Jiangsu Lemote Technology Corporation Limited', '00237F': 'PLANTRONICS, INC.', '002323': 'Zylin AS', '0022F8': 'PIMA Electronic Systems Ltd.', '00231C': 'Fourier Systems Ltd.', '00231D': 'Deltacom Electronics Ltd', '00230F': 'Hirsch Electronics Corporation', '00230A': 'ARBURG GmbH & Co KG', '002330': 'DIZIPIA, INC.', '00232D': 'SandForce', '0022DE': 'OPPO Digital, Inc.', '002316': 'KISAN ELECTRONICS CO', 
'0022F1': 'Private', '002287': 'Titan Wireless LLC', '002288': 'Sagrad, Inc.', '002285': 'NOMUS COMM SYSTEMS', '002281': 'Daintree Networks Pty', '0022D7': 'Nintendo Co., Ltd.', '0022D6': 'Cypak AB', '0022D0': 'Polar Electro Oy', '002295': 'SGM Technology for lighting spa', '0022A2': 'Xtramus Technologies', '0022C3': 'Zeeport Technology Inc.', '002273': 'Techway', '00226B': 'Cisco-Linksys, LLC', '002267': 'Nortel Networks', '00229E': 'Social Aid Research Co., Ltd.', '00222B': 'Nucomm, Inc.', '002226': 'Avaak, Inc.', '002221': 'ITOH DENKI CO,LTD.', '00221D': 'Freegene Technology LTD', '002240': 'Universal Telecom S/A', '002242': 'Alacron Inc.', '00220A': 'OnLive, Inc', '002203': 'Glensound Electronics Ltd', '002204': 'KORATEK', '002245': 'Leine & Linde AB', '002249': 'HOME MULTIENERGY SL', '002234': 'Corventis Inc.', '002232': 'Design Design Technology Ltd', '002224': 'Good Will Instrument Co., Ltd.', '00221B': 'Morega Systems', '002255': 'Cisco Systems, Inc', '00224E': 'SEEnergy Corp.', '00220F': 'MoCA (Multimedia over Coax Alliance)', '00225A': 'Garde Security AB', '0021AC': 'Infrared Integrated Systems Ltd', '0021A2': 'EKE-Electronics Ltd.', '0021FF': 'Cyfrowy Polsat SA', '0021F7': 'HPN Supply Chain', '0021E4': 'I-WIN', '0021E5': 'Display Solution AG', '0021E2': 'visago Systems & Controls GmbH & Co. KG', '0021F4': 'INRange Systems, Inc', '0021F5': 'Western Engravers Supply, Inc.', '0021C2': 'GL Communications Inc', '0021DD': 'Northstar Systems Corp', '0021D5': 'X2E GmbH', '0021B2': 'Fiberblaze A/S', '0021C9': 'Wavecom Asia Pacific Limited', '00214E': 'GS Yuasa Power Supply Ltd.', '00214A': 'Pixel Velocity, Inc', '002146': 'Sanmina-SCI', '002181': 'Si2 Microsystems Limited', '00217E': 'Telit Communication s.p.a', '00216E': 'Function ATI (Huizhou) Telecommunications Co., Ltd.', '00216D': 'Soltech Co., Ltd.', '00218E': 'MEKICS CO., LTD.', '00218F': 'Avantgarde Acoustic Lautsprechersysteme GmbH', '002187': 'Imacs GmbH', '002142': 'Advanced Control Systems doo', '002165': 'Presstek Inc.', '00213A': 'Winchester Systems Inc.', '00212E': 'dresden-elektronik', '00215B': 'SenseAnywhere', '002101': 'Aplicaciones Electronicas Quasar (AEQ)', '002103': 'GHI Electronics, LLC', '001FF8': 'Siemens AG, Sector Industry, Drive Technologies, Motion Control Systems', '002130': 'Keico Hightech Inc.', '002133': 'Building B, Inc', '002134': 'Brandywine Communications', '001FE6': 'Alphion Corporation', '001FFA': 'Coretree, Co, Ltd', '001FF5': 'Kongsberg Defence & Aerospace', '00211F': 'SHINSUNG DELTATECH CO.,LTD.', '002124': 'Optos Plc', '001FF2': 'VIA Technologies, Inc.', '001FF1': 'Paradox Hellas S.A.', '001FDC': 'Mobile Safe Track Ltd', '002117': 'Tellord', '00210F': 'Cernium Corp', '001F90': 'Actiontec Electronics, Inc', '001F91': 'DBS Lodging Technologies, LLC', '001F98': 'DAIICHI-DENTSU LTD.', '001F93': 'Xiotech Corporation', '001FD0': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '001FD1': 'OPTEX CO.,LTD.', '001FC9': 'Cisco Systems, Inc', '001FA9': 'Atlanta DTH, Inc.', '001FA3': 'T&W Electronics(Shenzhen)Co.,Ltd.', '001FA2': 'Datron World Communications, Inc.', '001F9C': 'LEDCO', '001FD6': 'Shenzhen Allywll', '001FCF': 'MSI Technology GmbH', '001F50': 'Swissdis AG', '001F4C': 'Roseman Engineering Ltd', '001F48': 'Mojix Inc.', '001F3E': 'RP-Technik e.K.', '001F2C': 'Starbridge Networks', '001F31': 'Radiocomp', '001F2B': 'Orange Logic', '001F2A': 'ACCM', '001F30': 'Travelping', '001F24': 'DIGITVIEW TECHNOLOGY CO., LTD.', '001F21': 'Inner Mongolia Yin An Science & Technology Development Co.,L', '001F22': 'Source 
Photonics, Inc.', '001F1D': 'Atlas Material Testing Technology LLC', '001F12': 'Juniper Networks', '001F15': 'Bioscrypt Inc', '001F70': 'Botik Technologies LTD', '001F6D': 'Cisco Systems, Inc', '001F51': 'HD Communications Corp', '001F53': 'GEMAC Chemnitz GmbH', '001F64': 'Beijing Autelan Technology Inc.', '001F37': 'Genesis I&C', '001E83': 'LAN/MAN Standards Association (LMSC)', '001E7C': 'Taiwick Limited', '001E78': 'Owitek Technology Ltd.,', '001E7A': 'Cisco Systems, Inc', '001E9A': 'HAMILTON Bonaduz AG', '001E94': 'SUPERCOM TECHNOLOGY CORPORATION', '001E8F': 'CANON INC.', '001EA6': 'Best IT World (India) Pvt. Ltd.', '001EA5': 'ROBOTOUS, Inc.', '001EA7': 'Actiontec Electronics, Inc', '001EA0': 'XLN-t', '001E98': 'GreenLine Communications', '001EDF': 'Master Industrialization Center Kista', '001EE3': 'T&W Electronics (ShenZhen) Co.,Ltd', '001ED9': 'Mitsubishi Precision Co.,LTd.', '001F04': 'Granch Ltd.', '001EF0': 'Gigafin Networks', '001EF2': 'Micro Motion Inc', '001ED3': "Dot Technology Int'l Co., Ltd.", '001ECF': 'PHILIPS ELECTRONICS UK LTD', '001EEA': 'Sensor Switch, Inc.', '001E27': 'SBN TECH Co.,Ltd.', '001E22': 'ARVOO Imaging Products BV', '001E1A': 'Best Source Taiwan Inc.', '001E44': 'SANTEC', '001E19': 'GTRI', '001E14': 'Cisco Systems, Inc', '001E0E': 'MAXI VIEW HOLDINGS LIMITED', '001E62': 'Siemon', '001E5C': 'RB GeneralEkonomik', '001E5D': 'Holosys d.o.o.', '001E60': 'Digital Lighting Systems, Inc', '001E6C': 'Opaque Systems', '001E2F': 'DiMoto Pty Ltd', '001E36': 'IPTE', '001DA7': 'Seamless Internet', '001DA5': 'WB Electronics', '001DA8': 'Takahata Electronics Co.,Ltd', '001DA9': 'Castles Technology, Co., LTD', '001DE8': 'Nikko Denki Tsushin Corporation(NDTC)', '001DDA': 'Mikroelektronika spol. s r. o.', '001DE3': 'Intuicom', '001DDE': 'Zhejiang Broadcast&Television Technology Co.,Ltd.', '001DE5': 'Cisco Systems, Inc', '001DB8': 'Intoto Inc.', '001DB0': 'FuJian HengTong Information Technology Co.,Ltd', '001DCB': 'Exéns Development Oy', '001DCA': 'PAV Electronics Limited', '001DA1': 'Cisco Systems, Inc', '001DFE': 'Palm, Inc', '001DF5': 'Sunshine Co,LTD', '001DF0': 'Vidient Systems, Inc.', '001DEC': 'Marusys', '001E0F': 'Briot International', '001DC2': 'XORTEC OY', '001D55': 'ZANTAZ, Inc', '001D52': 'Defzone B.V.', '001D4A': 'Carestream Health, Inc.', '001D71': 'Cisco Systems, Inc', '001D65': 'Microwave Radio Communications', '001D64': 'Adam Communications Systems Int Ltd', '001D5E': 'COMING MEDIA CORP.', '001D96': 'WatchGuard Video', '001D8F': 'PureWave Networks', '001D33': 'Maverick Systems Inc.', '001D2C': 'Wavetrend Technologies (Pty) Limited', '001D8C': 'La Crosse Technology LTD', '001D50': 'SPINETIX SA', '001D45': 'Cisco Systems, Inc', '001D36': 'ELECTRONICS CORPORATION OF INDIA LIMITED', '001D69': 'Knorr-Bremse IT-Services GmbH', '001D07': 'Shenzhen Sang Fei Consumer Communications Co.,Ltd', '001D06': 'HM Electronics, Inc.', '001D01': 'Neptune Digital', '001CF8': 'Parade Technologies, Ltd.', '001CF7': 'AudioScience', '001CF6': 'Cisco Systems, Inc', '001CF5': 'Wiseblue Technology Limited', '001D27': 'NAC-INTERCOM', '001CDE': 'Interactive Multimedia eXchange Inc.', '001CCF': 'LIMETEK', '001CCA': 'Shanghai Gaozhi Science & Technology Development Co.', '001CC9': 'Kaise Electronic Technology Co., Ltd.', '001D24': 'Aclara Power-Line Systems Inc.', '001D18': 'Power Innovation GmbH', '001D1B': 'Sangean Electronics Inc.', '001D17': 'Digital Sky Corporation', '001CC8': 'INDUSTRONIC Industrie-Electronic GmbH & Co. 
KG', '001CC6': 'ProStor Systems', '001CBE': 'Nintendo Co., Ltd.', '001CEE': 'SHARP Corporation', '001CE3': 'Optimedical Systems', '001CA7': 'International Quartz Limited', '001CAB': 'Meyer Sound Laboratories, Inc.', '001C9E': 'Dualtech IT AB', '001C60': 'CSP Frontier Technologies,Inc.', '001C51': 'Celeno Communications', '001C54': 'Hillstone Networks Inc', '001C59': 'DEVON IT', '001C48': 'WiDeFi, Inc.', '001C46': 'QTUM', '001C42': 'Parallels, Inc.', '001C8C': 'DIAL TECHNOLOGY LTD.', '001C93': 'ExaDigm Inc', '001C94': 'LI-COR Biosciences', '001C85': 'Eunicorn', '001C80': 'New Business Division/Rhea-Information CO., LTD.', '001C83': 'New Level Telecom Co., Ltd.', '001C76': 'The Wandsworth Group Ltd', '001C72': 'Mayer & Cie GmbH & Co KG', '001C6A': 'Weiss Engineering Ltd.', '001CAD': 'Wuhan Telecommunication Devices Co.,Ltd', '001C67': 'Pumpkin Networks, Inc.', '001BF8': 'Digitrax Inc.', '001BF1': 'Nanjing SilverNet Software Co., Ltd.', '001BEF': 'Blossoms Digital Technology Co.,Ltd.', '001BEB': 'DMP Electronics INC.', '001C0A': 'Shenzhen AEE Technology Co.,Ltd.', '001C0D': 'G-Technology, Inc.', '001C03': 'Betty TV Technology AG', '001C27': 'Sunell Electronics Co.', '001C22': 'Aeris Elettronica s.r.l.', '001C1D': 'CHENZHOU GOSPELL DIGITAL TECHNOLOGY CO.,LTD', '001C3E': 'ECKey Corporation', '001C39': 'S Netsystems Inc.', '001C37': 'Callpod, Inc.', '001BE7': 'Postek Electronics Co., Ltd.', '001BE3': 'Health Hero Network, Inc.', '001BDC': 'Vencer Co., Ltd.', '001BD5': 'Cisco Systems, Inc', '001C33': 'Sutron', '001C2F': 'Pfister GmbH', '001BF6': 'CONWISE Technology Corporation Ltd.', '001C0F': 'Cisco Systems, Inc', '001BA5': 'MyungMin Systems, Inc.', '001BA2': 'IDS Imaging Development Systems GmbH', '001B9D': 'Novus Security Sp. z o.o.', '001BC9': 'FSN DISPLAY INC', '001BC3': 'Mobisolution Co.,Ltd', '001B7A': 'Nintendo Co., Ltd.', '001B72': 'Sicep s.p.a.', '001B74': 'MiraLink Corporation', '001B6D': 'Midtronics, Inc.', '001B7F': 'TMN Technologies Telecomunicacoes Ltda', '001B7E': 'Beckmann GmbH', '001BB0': 'BHARAT ELECTRONICS', '001BAD': 'iControl Incorporated', '001BA7': 'Lorica Solutions', '001BB8': 'BLUEWAY ELECTRONIC CO;LTD', '001BB2': 'Intellect International NV', '001BCE': 'Measurement Devices Ltd', '001B95': 'VIDEO SYSTEMS SRL', '001B90': 'Cisco Systems, Inc', '001B87': 'Deepsound Tech. Co., Ltd', '001B5F': 'Alien Technology', '001B61': 'Digital Acoustics, LLC', '001B5E': 'BPL Limited', '001B5C': 'Azuretec Co., Ltd.', '001B12': 'Apprion', '001B0D': 'Cisco Systems, Inc', '001B2E': 'Sinkyo Electron Inc', '001B2D': 'Med-Eng Systems Inc.', '001B0A': 'Intelligent Distributed Controls Ltd', '001B4B': 'SANION Co., Ltd.', '001B4D': 'Areca Technology Corporation', '001B1D': 'Phoenix International Co., Ltd', '001B1A': 'e-trees Japan, Inc.', '001B34': 'Focus System Inc.', '001B3A': 'SIMS Corp.', '001AFE': 'SOFACREAL', '001B6F': 'Teletrak Ltd', '001AE4': 'Medicis Technologies Corporation', '001AEE': 'Shenztech Ltd', '001AE3': 'Cisco Systems, Inc', '001ADF': 'Interactivetv Pty Limited', '001AE1': 'EDGE ACCESS INC', '001AE8': 'Unify Software and Solutions GmbH & Co. KG', '001AE9': 'Nintendo Co., Ltd.', '001AE5': 'Mvox Technologies Inc.', '001AB1': 'Asia Pacific Satellite Industries Co., Ltd.', '001AB7': 'Ethos Networks LTD.', '001AB2': 'Cyber Solutions Inc.', '001AF1': 'Embedded Artists AB', '001AF8': 'Copley Controls Corporation', '001AF5': 'PENTAONE. 
CO., LTD.', '001AED': 'INCOTEC GmbH', '001ACC': 'Celestial Semiconductor, Ltd', '001AC7': 'UNIPOINT', '001AD2': 'Eletronica Nitron Ltda', '001AC0': 'JOYBIEN TECHNOLOGIES CO., LTD.', '001AC2': 'YEC Co.,Ltd.', '001AA5': 'BRN Phoenix', '001AA2': 'Cisco Systems, Inc', '001ACE': 'YUPITERU CORPORATION', '001A98': 'Asotel Communication Limited Taiwan Branch', '001A97': 'fitivision technology Inc.', '001A90': 'Trópico Sistemas e Telecomunicações da Amazônia LTDA. ', '001A94': 'Votronic GmbH', '001A5A': 'Korea Electric Power Data Network (KDN) Co., Ltd', '001A5F': 'KitWorks.fi Ltd.', '001A5D': 'Mobinnova Corp.', '001A4D': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '001A48': 'Takacom Corporation', '001A86': 'AdvancedIO Systems Inc', '001A7E': 'LN Srithai Comm Ltd.', '001A72': 'Mosart Semiconductor Corp.', '001A9C': 'RightHand Technologies, Inc.', '001A9E': 'ICON Digital International Limited', '001A68': 'Weltec Enterprise Co., Ltd.', '001A28': 'ASWT Co., LTD. Taiwan Branch H.K.', '001A2C': 'SATEC Co.,LTD', '001A27': 'Ubistar', '001A21': 'Brookhuis Applied Technologies BV', '001A35': 'BARTEC GmbH', '001A37': 'Lear Corporation', '001A38': 'Sanmina-SCI', '001A2B': 'Ayecom Technology Co., Ltd.', '0019FF': 'Finnzymes', '0019FA': 'Cable Vision Electronics CO., LTD.', '0019F1': 'Star Communication Network Technology Co.,Ltd', '0019EC': 'Sagamore Systems, Inc.', '0019EA': 'TeraMage Technologies Co., Ltd.', '001A2E': 'Ziova Coporation', '001A32': 'ACTIVA MULTIMEDIA', '001A06': 'OpVista, Inc.', '001A00': 'MATRIX INC.', '001A0B': 'BONA TECHNOLOGY INC.', '0019C4': 'Infocrypt Inc.', '0019BC': 'ELECTRO CHANCE SRL', '001993': 'Changshu Switchgear MFG. Co.,Ltd. (Former Changshu Switchgea', '0019AD': 'BOBST SA', '0019A2': 'ORDYN TECHNOLOGIES', '0019A5': 'RadarFind Corporation', '0019D5': 'IP Innovations, Inc.', '0019CC': 'RCG (HK) Ltd', '0019C8': 'AnyDATA Corporation', '00199B': 'Diversified Technical Systems, Inc.', '001990': 'ELM DATA Co., Ltd.', '00198F': 'Nokia Bell N.V.', '0019D8': 'MAXFOR', '0019A3': 'asteel electronique atlantique', '001981': 'Vivox Inc', '001929': '2M2B Montadora de Maquinas Bahia Brasil LTDA', '00192A': 'Antiope Associates', '00190F': 'Advansus Corp.', '00196E': 'Metacom (Pty) Ltd.', '001965': 'YuHua TelTech (ShangHai) Co., Ltd.', '001966': 'Asiarock Technology Limited', '00193C': 'HighPoint Technologies Incorporated', '00192E': 'Spectral Instruments, Inc.', '00192B': 'Aclara RF Systems Inc.', '001973': 'Zeugma Systems', '001975': 'Beijing Huisen networks technology Inc', '00197B': 'Picotest Corp.', '00191E': 'Beyondwiz Co., Ltd.', '00191F': 'Microlink communications Inc.', '001920': 'KUME electric Co.,Ltd.', '001926': 'BitsGen Co., Ltd.', '001945': 'RF COncepts, LLC', '001948': 'AireSpider Networks', '00195C': 'Innotech Corporation', '00195F': 'Valemount Networks Corporation', '001943': 'Belden', '001940': 'Rackable Systems', '0018B3': 'TEC WizHome Co., Ltd.', '0018AD': 'NIDEC SANKYO CORPORATION', '0018AC': 'Shanghai Jiao Da HISYS Technology Co. 
Ltd.', '0018AB': 'BEIJING LHWT MICROELECTRONICS INC.', '0018A5': 'ADigit Technologies Corp.', '0018F1': 'Chunichi Denshi Co.,LTD.', '0018F2': 'Beijing Tianyu Communication Equipment Co., Ltd', '001911': 'Just In Mobile Information Technologies (Shanghai) Co., Ltd.', '001917': 'Posiflex Inc.', '001918': 'Interactive Wear AG', '0018EC': 'Welding Technology Corporation', '0018ED': 'Accutech Ultrasystems Co., Ltd.', '0018BD': 'SHENZHEN DVBWORLD TECHNOLOGY CO., LTD.', '0018B2': 'ADEUNIS RF', '0018E0': 'ANAVEO', '0018CE': 'Dreamtech Co., Ltd', '0018CB': 'Tecobest Technology Limited', '001900': 'Intelliverese - DBA Voicecom', '001902': 'Cambridge Consultants Ltd', '00190B': 'Southern Vision Systems, Inc.', '001903': 'Bigfoot Networks Inc', '00189B': 'Thomson Inc.', '001894': 'NPCore, Inc.', '001898': 'KINGSTATE ELECTRONICS CORPORATION', '001892': 'ads-tec GmbH', '001847': 'AceNet Technology Inc.', '001878': 'Mackware GmbH', '00186A': 'Global Link Digital Technology Co,.LTD', '00186B': 'Sambu Communics CO., LTD.', '001877': 'Amplex A/S', '001867': 'Datalogic ADC', '001865': 'Siemens Healthcare Diagnostics Manufacturing Ltd', '001891': 'Zhongshan General K-mate Electronics Co., Ltd', '001889': 'WinNet Solutions Limited', '00186E': '3Com Ltd', '0018A6': 'Persistent Systems, LLC', '001854': 'Argard Co., Ltd', '0017CB': 'Juniper Networks', '0017D3': 'Etymotic Research, Inc.', '0017CC': 'Alcatel-Lucent', '00182C': 'Ascend Networks, Inc.', '001836': 'REJ Co.,Ltd', '0017ED': 'WooJooIT Ltd.', '0017DD': 'Clipsal Australia', '0017D9': 'AAI Corporation', '0017F9': 'Forcom Sp. z o.o.', '0017F4': 'ZERON ALLIANCE', '0017F7': 'CEM Solutions Pvt Ltd', '001805': 'Beijing InHand Networking Technology Co.,Ltd.', '00180D': 'Terabytes Server Storage Tech Corp', '001843': 'Dawevision Ltd', '00181D': 'ASIA ELECTRONICS CO.,LTD', '00181F': 'Palmmicro Communications', '00181B': 'TaiJin Metal Co., Ltd.', '0017DC': 'DAEMYUNG ZERO1', '0017AC': "O'Neil Product Development Inc.", '0017AD': 'AceNet Corporation', '00179E': 'Sirit Inc', '0017A7': 'Mobile Computing Promotion Consortium', '0017C4': 'Quanta Microsystems, INC.', '0017BF': 'Coherent Research Limited', '0017BD': 'Tibetsystem', '00176F': 'PAX Computer Technology(Shenzhen) Ltd.', '001792': 'Falcom Wireless Comunications Gmbh', '001797': 'Telsy Elettronica S.p.A.', '00178C': 'Independent Witness, Inc', '001788': 'Philips Lighting BV', '001766': 'Accense Technology, Inc.', '001764': 'ATMedia GmbH', '00179F': 'Apricorn', '0017A1': '3soft inc.', '0017B4': 'Remote Security Systems, LLC', '001771': 'APD Communications Ltd', '00173C': 'Extreme Engineering Solutions', '001736': 'iiTron Inc.', '001737': 'Industrie Dial Face S.p.A.', '001733': 'SFR', '001758': 'ThruVision Ltd', '00174F': 'iCatch Inc.', '00174A': 'SOCOMEC', '001709': 'Exalt Communications', '001703': 'MOSDAN Internation Co.,Ltd', '0016FC': 'TOHKEN CO.,LTD.', '001727': 'Thermo Ramsey Italia s.r.l.', '00172A': 'Proware Technology Corp.(By Unifosa)', '001725': 'Liquid Computing', '00171B': 'Innovation Lab Corp.', '00174E': 'Parama-tech Co.,Ltd.', '00175E': 'Zed-3', '00175B': 'ACS Solutions Switzerland Ltd.', '001729': 'Ubicod Co.LTD', '001714': 'BR Controls Nederland bv', '0016F8': 'AVIQTECH TECHNOLOGY CO., LTD.', '0016E8': 'Sigma Designs, Inc.', '0016E9': 'Tiba Medical Inc', '0016D0': 'ATech elektronika d.o.o.', '0016C3': 'BA Systems Inc', '0016C1': 'Eleksen Ltd', '0016A9': '2EI', '0016B0': 'VK Corporation', '0016B1': 'KBS', '0016AE': 'INVENTEL', '0016AC': 'Toho Technology Corp.', '001694': 'Sennheiser Communications 
A/S', '00169C': 'Cisco Systems, Inc', '0016DC': 'ARCHOS', '0016F0': 'Dell', '00167F': 'Bluebird Soft Inc.', '00167C': 'iRex Technologies BV', '001673': 'Bury GmbH & Co. KG', '00168C': 'DSL Partner AS', '001683': 'WEBIO International Co.,.Ltd.', '001684': 'Donjin Co.,Ltd.', '001687': 'Chubb CSC-Vendor AP', '001647': 'Cisco Systems, Inc', '001643': 'Sunhillo Corporation', '001644': 'LITE-ON Technology Corp.', '00163E': 'Xensource, Inc.', '001634': 'Mathtech, Inc.', '00161B': 'Micronet Corporation', '00162C': 'Xanboo', '00165F': 'Fairmount Automation', '001657': 'Aegate Ltd', '00164C': 'PLANET INT Co., Ltd', '001649': 'SetOne GmbH', '001665': 'Cellon France', '001618': 'HIVION Co., Ltd.', '00161E': 'Woojinnet', '00161F': 'SUNWAVETEC Co., Ltd.', '001614': 'Picosecond Pulse Labs', '00160E': 'Optica Technologies Inc.', '0015CA': 'TeraRecon, Inc.', '0015D3': 'Pantech&Curitel Communications, Inc.', '0015E7': 'Quantec Tontechnik', '0015E3': 'Dream Technologies Corporation', '0015E0': 'Ericsson', '0015B4': 'Polymap Wireless LLC', '001605': 'YORKVILLE SOUND INC.', '0015F6': 'SCIENCE AND ENGINEERING SERVICES, INC.', '0015F3': 'PELTOR AB', '0015F9': 'Cisco Systems, Inc', '0015FD': 'Complete Media Systems', '00154B': 'Wonde Proud Technology Co., Ltd', '001548': 'CUBE TECHNOLOGIES', '001570': 'Zebra Technologies Inc', '00156E': 'A. W. Communication Systems Ltd', '001568': 'Dilithium Networks', '0015AD': 'Accedian Networks', '0015AC': 'Capelon AB', '0015A9': 'KWANG WOO I&C CO.,LTD', '00155F': 'GreenPeak Technologies', '001589': 'D-MAX Technology Co.,Ltd', '00155A': 'DAINIPPON PHARMACEUTICAL CO., LTD.', '001574': 'Horizon Semiconductors Ltd.', '00158A': 'SURECOM Technology Corp.', '00158E': 'Plustek.INC', '00E0A8': 'SAT GmbH & Co.', '001598': 'Kolektor group', '00153A': 'Shenzhen Syscan Technology Co.,Ltd.', '001532': 'Consumer Technologies Group, LLC', '001512': 'Zurich University of Applied Sciences', '00150A': 'Sonoa Systems, Inc', '0014FA': 'AsGa S.A.', '0014FB': 'Technical Solutions Inc.', '0014F4': 'DekTec Digital Video B.V.', '00153C': 'Kprotech Co., Ltd.', '0014FF': 'Precise Automation, Inc.', '00151D': 'M2I CORPORATION', '0014A9': 'Cisco Systems, Inc', '00149B': 'Nokota Communications, LLC', '0014A1': 'Synchronous Communication Corp', '00149E': 'UbONE Co., Ltd', '0014AB': 'Senhai Electronic Technology Co., Ltd.', '0014B0': 'Naeil Community', '0014AD': 'Gassner Wiege- und Meßtechnik GmbH', '0014AF': 'Datasym POS Inc.', '0014F5': 'OSI Security Devices', '0014EC': 'Acro Telecom', '00148E': 'Tele Power Inc.', '00148F': 'Protronic (Far East) Ltd.', '00148C': 'General Dynamics Mission Systems', '0014D2': 'Kyuden Technosystems Corporation', '0014DC': 'Communication System Design & Manufacturing (CSDM)', '0014C1': 'U.S. Robotics Corporation', '0014C4': 'Vitelcom Mobile Technology', '0014CD': 'DigitalZone Co., Ltd.', '0014C0': 'Symstream Technology Group Ltd', '0014A2': 'Core Micro Systems Inc.', '0014EB': 'AwarePoint Corporation', '001437': 'GSTeletech Co.,Ltd.', '001431': 'PDL Electronics Ltd', '00142B': 'Edata Communication Inc.', '00142C': 'Koncept International, Inc.', '001424': 'Merry Electrics CO., LTD.', '00144D': 'Intelligent Systems', '00144A': 'Taiwan Thick-Film Ind. Corp.', '001445': 'Telefon-Gradnja d.o.o.', '001468': 'CelPlan International, Inc.', '001487': 'American Technology Integrators', '001481': 'Multilink Inc', '00145E': 'IBM Corp', '001458': 'HS Automatic ApS', '00147C': '3Com Ltd', '001444': 'Grundfos Holding', '001464': 'Cryptosoft', '001421': 'Total Wireless Technologies Pte. 
Ltd.', '001420': 'G-Links networking company', '001418': 'C4Line', '00141B': 'Cisco Systems, Inc', '00140F': 'Federal State Unitary Enterprise Leningrad R&D Institute of', '001407': 'Sperian Protection Instrumentation', '0013F0': 'Wavefront Semiconductor', '0013EF': 'Kingjon Digital Technology Co.,Ltd', '0013EB': 'Sysmaster Corporation', '0013C3': 'Cisco Systems, Inc', '0013BD': 'HYMATOM SA', '001406': 'Go Networks', '00140A': 'WEPIO Co., Ltd.', '0013D0': 't+ Medical Ltd', '0013D2': 'PAGE IBERICA, S.A.', '0013D1': 'KIRK telecom A/S', '0013DC': 'IBTEK INC.', '0013EC': 'Netsnapper Technologies SARL', '0013FA': 'LifeSize Communications, Inc', '0013FB': 'RKC INSTRUMENT INC.', '0013BA': 'ReadyLinks Inc', '0013B8': 'RyCo Electronic Systems Limited', '0013B6': 'Sling Media, Inc.', '001373': 'BLwave Electronics Co., Ltd', '00136F': 'PacketMotion, Inc.', '001368': 'Saab Danmark A/S', '001366': 'Neturity Technologies Inc.', '00135B': 'PanelLink Cinema, LLC', '00137D': 'Dynalab, Inc.', '001383': 'Application Technologies and Engineering Research Laboratory', '001387': '27M Technologies AB', '00135E': 'EAB/RWI/K', '00134E': 'Valox Systems, Inc.', '0013B4': 'Appear TV', '0013AE': 'Radiance Technologies, Inc.', '001397': 'Oracle Corporation ', '001391': 'OUEN CO.,LTD.', '001312': 'Amedia Networks Inc.', '001336': 'Tianjin 712 Communication Broadcasting co., ltd.', '001323': 'Cap Co., Ltd.', '001314': 'Asiamajor Inc.', '001316': 'L-S-B Broadcast Technologies GmbH', '001344': 'Fargo Electronics Inc.', '001348': 'Artila Electronics Co., Ltd.', '0012EF': 'OneAccess SA', '0012E9': 'Abbey Systems Ltd', '0012E6': 'SPECTEC COMPUTER CO., LTD.', '0012E3': 'Agat-RT, Ltd.', '001300': 'IT-FACTORY, INC.', '001322': 'DAQ Electronics, Inc.', '0012A2': 'VITA', '0012A5': 'Dolphin Interconnect Solutions AS', '0012A8': 'intec GmbH', '00129E': 'Surf Communications Inc.', '0012D5': 'Motion Reality Inc.', '0012D8': 'International Games System Co., Ltd.', '0012DB': 'ZIEHL industrie-elektronik GmbH + Co KG', '0012D6': 'Jiangsu Yitong High-Tech Co.,Ltd', '0012DA': 'Cisco Systems, Inc', '0012D3': 'Zetta Systems, Inc.', '0012BA': 'FSI Systems, Inc.', '0012B2': 'AVOLITES LTD.', '0012AE': 'HLS HARD-LINE Solutions Inc.', '0012AF': 'ELPRO Technologies', '00129A': 'IRT Electronics Pty Ltd', '00128D': 'STB Datenservice GmbH', '00128E': 'Q-Free ASA', '001292': 'Griffin Technology', '001236': 'ConSentry Networks', '001235': 'Andrew Corporation', '001245': 'Zellweger Analytics, Inc.', '001242': 'Millennial Net', '001241': 'a2i marketing center', '00123B': 'KeRo Systems ApS', '001287': 'Digital Everywhere Unterhaltungselektronik GmbH', '001282': 'Qovia', '001285': 'Gizmondo Europe Ltd', '001229': 'BroadEasy Technologies Co.,Ltd', '001226': 'Japan Direx Corporation', '001222': 'Skardin (UK) Ltd', '001277': 'Korenix Technologies Co., Ltd.', '00126D': 'University of California, Berkeley', '001267': 'Panasonic Corporation', '001265': 'Enerdyne Technologies, Inc.', '001257': 'LeapComm Communication Technologies Inc.', '001251': 'SILINK', '001202': 'Decrane Aerospace - Audio International Inc.', '0011FE': 'Keiyo System Research, Inc.', '0011FD': 'KORG INC.', '0011FA': 'Rane Corporation', '0011F3': 'NeoMedia Europe AG', '0011E9': 'STARNEX CO., LTD.', '0011EC': 'AVIX INC.', '0011E7': 'WORLDSAT - Texas de France', '0011CE': 'Ubisense Limited', '0011D0': 'Tandberg Data ASA', '0011C3': 'Transceiving System Technology Corporation', '0011C2': 'United Fiber Optic Communication', '001228': 'Data Ltd.', '00121F': 'Harding Instruments', '001220': 'Cadco 
Systems', '0011E8': 'Tixi.Com', '0011E0': 'U-MEDIA Communications, Inc.', '0011F2': 'Institute of Network Technologies', '00120D': 'Advanced Telecommunication Technologies, Inc.', '00120E': 'AboCom', '0011BF': 'AESYS S.p.A.', '001210': 'WideRay Corp', '0011A4': 'JStream Technologies Inc.', '001198': 'Prism Media Products Limited', '001197': 'Monitoring Technologies Limited', '001199': '2wcom Systems GmbH', '0011A7': 'Infilco Degremont Inc.', '0011A9': 'MOIMSTONE Co., LTD', '0011A3': 'LanReady Technologies Inc.', '00116B': 'Digital Data Communications Asia Co.,Ltd', '001169': 'EMS Satcom', '001162': 'STAR MICRONICS CO.,LTD.', '001161': 'NetStreams, LLC', '001176': 'Intellambda Systems, Inc.', '001177': 'Coaxial Networks, Inc.', '001156': 'Pharos Systems NZ', '001159': 'MATISSE NETWORKS INC', '00115C': 'Cisco Systems, Inc', '00115D': 'Cisco Systems, Inc', '001181': 'InterEnergy Co.Ltd,', '001194': 'Chi Mei Communication Systems, Inc.', '001170': 'GSC SRL', '0011B7': 'Octalix B.V.', '0011B9': 'Inner Range Pty. Ltd.', '001100': 'Schneider Electric', '000FFA': 'Optinel Systems, Inc.', '000FFD': 'Glorytek Network Inc.', '000FF9': 'Valcretec, Inc.', '001126': 'Venstar Inc.', '00112E': 'CEICOM', '001122': 'CIMSYS Inc', '001121': 'Cisco Systems, Inc', '001117': 'CESNET', '001116': 'COTEAU VERT CO., LTD.', '001110': 'Maxanna Technology Co., Ltd.', '001109': 'Micro-Star International', '001104': 'TELEXY', '00113B': 'Micronet Communications Inc.', '00113D': 'KN SOLTEC CO.,LTD.', '001134': 'MediaCell, Inc.', '001135': 'Grandeye Ltd', '001149': 'Proliphix Inc.', '000FBA': 'Tevebox AB', '000FB8': 'CallURL Inc.', '000FB7': 'Cavium', '000FD1': 'Applied Wireless Identifications Group, Inc.', '000FC1': 'WAVE Corporation', '000F85': 'ADDO-Japan Corporation', '000F82': 'Mortara Instrument, Inc.', '000FC4': 'NST co.,LTD.', '000FC9': 'Allnet GmbH', '000FC6': 'Eurocom Industries A/S', '000FBE': 'e-w/you Inc.', '000FC0': 'DELCOMp', '000F8A': 'WideView', '000F90': 'Cisco Systems, Inc', '000F8D': 'FAST TV-Server AG', '000FA9': 'PC Fabrik', '000FA0': 'CANON KOREA BUSINESS SOLUTIONS INC.', '000F9A': 'Synchrony, Inc.', '000F95': 'ELECOM Co.,LTD Laneed Division', '000FF7': 'Cisco Systems, Inc', '000FEA': 'Giga-Byte Technology Co.,LTD.', '000FD8': 'Force, Inc.', '000FD3': 'Digium', '000F49': 'Northover Solutions Limited', '000F4B': 'Oracle Corporation', '000F44': 'Tivella Inc.', '000F4A': 'Kyushu-kyohan co.,ltd', '000F81': 'PAL Pacific Inc.', '000F7F': 'UBSTORAGE Co.,Ltd.', '000F1A': 'Gaming Support B.V.', '000F0A': 'Clear Edge Networks', '000F5B': 'Delta Information Systems, Inc.', '000F55': 'Datawire Communication Networks Inc.', '000F56': 'Continuum Photonics Inc', '000F40': 'Optical Internetworking Forum', '000F33': 'DUALi Inc.', '000F2F': 'W-LINX TECHNOLOGY CO., LTD.', '000F2D': 'CHUNG-HSIN ELECTRIC & MACHINERY MFG.CORP.', '000F1C': 'DigitAll World Co., Ltd', '000F68': 'Vavic Network Technology, Inc.', '000F60': 'Lifetron Co.,Ltd', '000F77': 'DENTUM CO.,LTD', '000F7B': 'Arce Sistemas, S.A.', '000EF0': 'Festo AG & Co. 
KG', '000EE9': 'WayTech Development, Inc.', '000EEB': 'Sandmartin(zhong shan)Electronics Co.,Ltd', '000EEC': 'Orban', '000EEF': 'Private', '000EE3': 'Chiyu Technology Co.,Ltd', '000EE5': 'bitWallet, Inc.', '000EDA': 'C-TECH UNITED CORP.', '000ECF': 'PROFIBUS Nutzerorganisation e.V.', '000EC9': 'YOKO Technology Corp.', '000ED0': 'Privaris, Inc.', '000EC3': 'Logic Controls, Inc.', '000EC4': 'Iskra Transmission d.d.', '000EF1': 'EZQUEST INC.', '000EE7': 'AAC ELECTRONICS CORP.', '000EE2': 'Custom Engineering', '000EB9': 'HASHIMOTO Electronics Industry Co.,Ltd.', '000EB2': 'Micro-Research Finland Oy', '000EB5': 'Ecastle Electronics Co., Ltd.', '000EC1': 'MYNAH Technologies', '000EBD': 'Burdick, a Quinton Compny', '000F02': 'Digicube Technology Co., Ltd', '000F05': '3B SYSTEM INC.', '000EF7': 'Vulcan Portals Inc', '000EA7': 'Endace Technology', '000E95': 'Fujiya Denki Seisakusho Co.,Ltd.', '000E97': 'Ultracker Technology CO., Inc', '000EA1': 'Formosa Teletek Corporation', '000E98': 'HME Clear-Com LTD.', '000E99': 'Spectrum Digital, Inc', '000E89': 'CLEMATIC', '000E79': 'Ample Communications Inc.', '000E4B': 'atrium c and i', '000E3F': 'Soronti, Inc.', '000E45': 'Beijing Newtry Electronic Technology Ltd', '000E4F': 'Trajet GmbH', '000E50': 'Thomson Telecom Belgium', '000E6F': 'IRIS Corporation Berhad', '000E82': 'Commtech Wireless', '000E38': 'Cisco Systems, Inc', '000E36': 'HEINESYS, Inc.', '000E2B': 'Safari Technologies', '000E17': 'Private', '000E15': 'Tadlys LTD', '000E1D': 'ARION Technology Inc.', '000E04': 'CMA/Microdialysis AB', '000DE8': 'Nasaco Electronics Pte. Ltd', '000DE9': 'Napatech Aps', '000DE6': 'YOUNGBO ENGINEERING CO.,LTD', '000DDB': 'AIRWAVE TECHNOLOGIES INC.', '000DCB': 'Petcomkorea Co., Ltd.', '000DDF': 'Japan Image & Network Inc.', '000E28': 'Dynamic Ratings P/L', '000E24': 'Huwell Technology Inc.', '000E12': 'Adaptive Micro Systems Inc.', '000E01': 'ASIP Technologies Inc.', '000DCC': 'NEOSMART Corp.', '000DCE': 'Dynavac Technology Pte Ltd', '000DC3': 'First Communication, Inc.', '000DB7': 'SANKO ELECTRIC CO,.LTD', '000DAC': 'Japan CBM Corporation', '000D95': 'Opti-cell, Inc.', '000D91': 'Eclipse (HQ Espana) S.L.', '000DB9': 'PC Engines GmbH', '000DA1': 'MIRAE ITS Co.,LTD.', '000D62': 'Funkwerk Dabendorf GmbH', '000D80': 'Online Development Inc', '000D41': 'Siemens AG ICM MP UC RD IT KLF1', '000D42': 'Newbest Development Limited', '000D3C': 'i.Tech Dynamic Ltd', '000D39': 'Network Electronics', '000D13': 'Wilhelm Rutenbeck GmbH&Co.KG', '000D19': 'ROBE Show lighting', '000D1C': 'Amesys Defense', '000D16': 'UHS Systems Pty Ltd', '000D29': 'Cisco Systems, Inc', '000D26': 'Primagraphics Limited', '000D35': 'PAC International Ltd', '000D2A': 'Scanmatic AS', '000D57': 'Fujitsu I-Network Systems Limited.', '000D58': 'Private', '000D51': 'DIVR Systems, Inc.', '000D45': 'Tottori SANYO Electric Co., Ltd.', '000D46': 'Parker SSD Drives', '000D65': 'Cisco Systems, Inc', '000D55': 'SANYCOM Technology Co.,Ltd', '000D0C': 'MDI Security Systems', '000D1F': 'AV Digital', '000CAA': 'Cubic Transportation Systems Inc', '000CAC': 'Citizen Watch Co., Ltd.', '000CAE': 'Ailocom Oy', '000CA3': 'Rancho Technology, Inc.', '000CED': 'Real Digital Media', '000CF0': 'M & N GmbH', '000CF4': 'AKATSUKI ELECTRIC MFG.CO.,LTD.', '000CF3': 'CALL IMAGE SA', '000CE0': 'Trek Diagnostics Inc.', '000CD1': 'SFOM Technology Corp.', '000CCE': 'Cisco Systems, Inc', '000CCF': 'Cisco Systems, Inc', '000CD8': 'M. K. 
Juchheim GmbH & Co', '000CBA': 'Jamex, Inc.', '000D01': 'P&E Microcomputer Systems, Inc.', '000D00': 'Seaway Networks Inc.', '000CF9': 'Xylem Water Solutions', '000CB7': 'Nanjing Huazhuo Electronics Co., Ltd.', '000CC6': 'Ka-Ro electronics GmbH', '000CC4': 'Tiptel AG', '000CAD': 'BTU International', '000CE2': 'Rolls-Royce', '000C56': 'Megatel Computer (1986) Corp.', '000C57': 'MACKIE Engineering Services Belgium BVBA', '000C97': 'NV ADB TTV Technologies SA', '000C8F': 'Nergal s.r.l.', '000C92': 'WolfVision Gmbh', '000C93': 'Xeline Co., Ltd.', '000C7F': 'synertronixx GmbH', '000C81': 'Schneider Electric (Australia) ', '000C4A': 'Cygnus Microsystems (P) Limited', '000C40': 'Altech Controls', '000C9E': 'MemoryLink Corp.', '000C95': 'PrimeNet', '000C9C': 'Chongho information & communications', '000C76': 'MICRO-STAR INTERNATIONAL CO., LTD.', '000C80': 'Opelcomm Inc.', '000C7D': 'TEIKOKU ELECTRIC MFG. CO., LTD', '000C54': 'Pedestal Networks, Inc', '000C68': 'SigmaTel, Inc.', '000BE5': 'HIMS International Corporation', '000BE9': 'Actel Corporation', '000BE3': 'Key Stream Co., Ltd.', '000BE8': 'AOIP', '000C0C': 'APPRO TECHNOLOGY INC.', '000BF9': 'Gemstone Communications, Inc.', '000C0E': 'XtremeSpectrum, Inc.', '000C12': 'Micro-Optronic-Messtechnik GmbH', '000C10': 'PNI Corporation', '000C0A': 'Guangdong Province Electronic Technology Research Institute', '000C0B': 'Broadbus Technologies', '000C07': 'Iftest AG', '000C3E': 'Crest Audio', '000C3A': 'Oxance', '000C35': 'KaVo Dental GmbH & Co. KG', '000C37': 'Geomation, Inc.', '000C23': 'Beijing Lanchuan Tech. Co., Ltd.', '000BEA': 'Zultys Technologies', '000BDD': 'TOHOKU RICOH Co., LTD.', '000C25': 'Allied Telesis Labs, Inc. ', '000C18': 'Zenisu Keisoku Inc.', '000BFC': 'Cisco Systems, Inc', '000BDC': 'AKCP', '000BD8': 'Industrial Scientific Corp.', '000BD7': 'DORMA Time + Access GmbH', '000BD3': 'cd3o', '000BD5': 'Nvergence, Inc.', '000B95': 'eBet Gaming Systems Pty Ltd', '000B9E': 'Yasing Technology Corp.', '000B88': 'Vidisco ltd.', '000B8C': 'Flextronics', '000B8A': 'MITEQ Inc.', '000BB8': 'Kihoku Electronic Co.', '000BD1': 'Aeronix, Inc.', '000BD2': 'Remopro Technology Inc.', '000BC7': 'ICET S.p.A.', '000BCC': 'JUSAN, S.A.', '000BC0': 'China IWNComm Co., Ltd.', '000BB0': 'Sysnet Telematica srl', '000BB4': 'RDC Semiconductor Inc.,', '000B90': 'ADVA Optical Networking Ltd.', '000B79': 'X-COM, Inc.', '000B87': 'American Reliance Inc.', '000B75': 'Iosoft Ltd.', '000B70': 'Load Technology, Inc.', '000BAC': '3Com Ltd', '000B9F': 'Neue ELSA GmbH', '000B98': 'NiceTechVision', '000B54': 'BiTMICRO Networks, Inc.', '000B43': 'Microscan Systems, Inc.', '000B45': 'Cisco Systems, Inc', '000B4C': 'Clarion (M) Sdn Bhd', '000B4B': 'VISIOWAVE SA', '000B36': 'Productivity Systems, Inc.', '000B3C': 'Cygnal Integrated Products, Inc.', '000B27': 'Scion Corporation', '000B35': 'Quad Bit System co., Ltd.', '000B3F': 'Anthology Solutions Inc.', '000B21': 'G-Star Communications Inc.', '000B25': 'Aeluros', '000B1A': 'Industrial Defender, Inc.', '000B18': 'Private', '000B15': 'Platypus Technology', '000B0D': 'Air2U, Inc.', '000B6F': 'Media Streaming Networks Inc', '000B63': 'Kaleidescape', '000B55': 'ADInstruments', '000B68': 'Addvalue Communications Pte Ltd', '000B58': 'Astronautics C.A LTD', '000AC0': 'Fuyoh Video Industry CO., LTD.', '000AAE': 'Rosemount Process Analytical', '000AB3': 'Fa. 
GIRA', '000B0B': 'Corrent Corporation', '000B08': 'Pillar Data Systems', '000AFA': 'Traverse Technologies Australia', '000AC1': 'Futuretel', '000ABE': 'OPNET Technologies CO., LTD.', '000AC3': 'eM Technics Co., Ltd.', '000AC4': 'Daewoo Teletech Co., Ltd', '000ABA': 'Arcon Technology Limited', '000AB6': 'COMPUNETIX, INC', '000AAF': 'Pipal Systems', '000AB2': 'Fresnel Wireless Systems', '000AE7': 'ELIOP S.A.', '000AE8': 'Cathay Roxus Information Technology Co. LTD', '000ADD': 'Allworx Corp.', '000AF1': 'Clarity Design, Inc.', '000AF3': 'Cisco Systems, Inc', '000AEC': 'Koatsu Gas Kogyo Co., Ltd.', '000AE1': 'EG Technology', '000ADF': 'Gennum Corporation', '000ADA': 'Vindicator Technologies', '000AC9': 'Zambeel Inc', '000AFC': 'Core Tec Communications, LLC', '000A8F': 'Aska International Inc.', '000AA0': 'Cedar Point Communications', '000A8E': 'Invacom Ltd', '000A99': 'Calamp Wireless Networks Inc', '000A76': 'Beida Jade Bird Huaguang Technology Co.,Ltd', '000A60': 'Autostar Technology Pte Ltd', '000A5A': 'GreenNET Technologies Co.,Ltd.', '000A56': 'HITACHI Maxell Ltd.', '000A51': 'GyroSignal Technology Co., Ltd.', '000A53': 'Intronics, Incorporated', '000AAA': 'AltiGen Communications Inc.', '000A8D': 'EUROTHERM LIMITED', '000A9F': 'Pannaway Technologies, Inc.', '000A4A': 'Targa Systems Ltd.', '000A39': 'LoPA Information Technology', '000A37': 'Procera Networks, Inc.', '000A93': 'W2 Networks, Inc.', '000A89': 'Creval Systems, Inc.', '000A5D': 'FingerTec Worldwide Sdn Bhd', '000A5C': 'Carel s.p.a.', '000A79': 'corega K.K', '000A80': 'Telkonet Inc.', '000A2A': 'QSI Systems Inc.', '000A23': 'Parama Networks Inc', '000A1F': 'ART WARE Telecommunication Co., Ltd.', '0009E3': 'Angel Iglesias S.A.', '0009D6': 'KNC One GmbH', '0009F4': 'Alcon Laboratories, Inc.', '0009F5': 'Emerson Network Power Co.,Ltd', '0009EA': 'YEM Inc.', '000A15': 'Silicon Data, Inc', '000A1B': 'Stream Labs', '000A1A': 'Imerge Ltd', '000A0A': 'SUNIX Co., Ltd.', '000A05': 'Widax Corp.', '000A04': '3Com Ltd', '000A34': 'Identicard Systems Incorporated', '000A30': 'Visteon Corporation', '000A2F': 'Artnix Inc.', '000A2C': 'Active Tchnology Corporation', '0009F1': 'Yamaki Electric Corporation', '0009C7': 'Movistec', '0009C9': 'BlueWINC Co., Ltd.', '0009D4': 'Transtech Networks', '0009CB': 'HBrain', '0009C5': 'KINGENE Technology Corporation', '0009CD': 'HUDSON SOFT CO.,LTD.', '0009C0': '6WIND', '0009BF': 'Nintendo Co., Ltd.', '00097E': 'IMI TECHNOLOGY CO., LTD', '0009B4': 'KISAN TELECOM CO., LTD.', '0009AE': 'OKANO ELECTRIC CO.,LTD', '000991': 'GE Fanuc Automation Manufacturing, Inc.', '000996': 'RDI', '00098B': 'Entropic Communications, Inc.', '0009BA': 'MAKU Informationstechik GmbH', '000921': 'Planmeca Oy', '000919': 'MDS Gateways', '000918': 'SAMSUNG TECHWIN CO.,LTD', '000910': 'Simple Access Inc.', '00092F': 'Akom Technology Corporation', '00091F': 'A&D Co., Ltd.', '000924': 'Telebau GmbH', '000947': 'Aztek, Inc.', '00093D': 'Newisys,Inc.', '000951': 'Apogee Imaging Systems', '00094E': 'BARTECH SYSTEMS INTERNATIONAL, INC', '000971': 'Time Management, Inc.', '000974': 'Innopia Technologies, Inc.', '000967': 'Tachyon, Inc', '00096A': 'Cloverleaf Communications Inc.', '00095A': 'RACEWOOD TECHNOLOGY', '000938': 'Allot Communications', '0008C1': 'Avistar Communications Corporation', '0008C6': 'Philips Consumer Communications', '0008BF': 'Aptus Elektronik AB', '0008B8': 'E.F. 
Johnson', '00090D': 'LEADER ELECTRONICS CORP.', '0008FE': 'UNIK C&C Co.,Ltd.', '0008FF': 'Trilogy Communications Ltd', '000904': 'MONDIAL electronic', '0008BB': 'NetExcell', '0008BE': 'XENPAK MSA Group', '0008FC': 'Gigaphoton Inc.', '0008EF': 'DIBAL,S.A.', '0008F0': 'Next Generation Systems, Inc.', '0008A8': 'Systec Co., Ltd.', '0008E7': 'SHI ControlSystems,Ltd.', '0008D7': 'HOW CORPORATION', '000914': 'COMPUTROLS INC.', '00090C': 'Mayekawa Mfg. Co. Ltd.', '0008A4': 'Cisco Systems, Inc', '0008E9': 'NextGig', '000807': 'Access Devices Limited', '0007FC': 'Adept Systems Inc.', '0007E8': 'EdgeWave', '000865': 'JASCOM CO., LTD', '000864': 'Fasy S.p.A.', '000859': 'ShenZhen Unitone Electronics Co., Ltd.', '00084E': 'DivergeNet, Inc.', '000819': 'Banksys', '00081A': 'Sanrad Intelligence Storage Communications (2000) Ltd.', '000810': 'Key Technology, Inc.', '000897': 'Quake Technologies', '000890': 'AVILINKS SA', '00088D': 'Sigma-Links Inc.', '000860': 'LodgeNet Entertainment Corp.', '000851': 'Canadian Bank Note Company, Ltd.', '000825': 'Acme Packet', '00081F': 'Pou Yuen Tech Corp. Ltd.', '08006B': 'ACCEL TECHNOLOGIES INC.', '000884': 'Index Braille AB', '00079E': 'Ilinx Co., Ltd.', '0007A0': 'e-Watch Inc.', '000794': 'Simple Devices, Inc.', '000797': 'Netpower Co., Ltd.', '0007C6': 'VDS Vosskuhler GmbH', '0007CC': 'Kaba Benzing GmbH', '0007BC': 'Identix Inc.', '0007EB': 'Cisco Systems, Inc', '0007EC': 'Cisco Systems, Inc', '0007F5': 'Bridgeco Co AG', '00078C': 'Elektronikspecialisten i Borlange AB', '0007A9': 'Novasonics', '0007A1': 'VIASYS Healthcare GmbH', '0007C0': 'NetZerver Inc.', '00047E': 'Siqura B.V.', '0007B8': 'Corvalent Corporation', '0007DC': 'Atek Co, Ltd.', '00071E': 'Tri-M Engineering / Nupak Dev. Corp.', '000717': 'Wieland Electric GmbH', '000711': 'Acterna', '00070A': 'Unicom Automation Co., Ltd.', '000733': 'DANCONTROL Engineering', '00072B': 'Jung Myung Telecom Co., Ltd.', '000718': 'iCanTek Co., Ltd.', '000716': 'J & S Marine Ltd.', '00071A': 'Finedigital Inc.', '000748': 'The Imaging Source Europe', '000746': 'TURCK, Inc.', '000741': 'Sierra Automated Systems', '000768': 'Danfoss A/S', '00075F': 'VCS Video Communication Systems AG', '00075C': 'Eastman Kodak Company', '000788': 'Clipcomm, Inc.', '000781': 'Itron Inc.', '00077B': 'Millimetrix Broadband Networks', '000745': 'Radlan Computer Communications Ltd.', '00073E': 'China Great-Wall Computer Shenzhen Co., Ltd.', '000756': 'Juyoung Telecom', '00075A': 'Air Products and Chemicals, Inc.', '00073B': 'Tenovis GmbH & Co KG', '000731': 'Ophir-Spiricon LLC', '000769': 'Italiana Macchi SpA', '00076B': 'Stralfors AB', '0006D9': 'IPM-Net S.p.A.', '0006D3': 'Alpha Telecom, Inc. 
U.S.A.', '000647': 'Etrali S.A.', '0006A4': 'INNOWELL Corp.', '0006D6': 'Cisco Systems, Inc', '000702': 'Varex Imaging', '000705': 'Endress & Hauser GmbH & Co', '0006CB': 'Jotron Electronics A/S', '0006CD': 'Leaf Imaging Ltd.', '0006B9': 'A5TEK Corp.', '0006B3': 'Diagraph Corporation', '0006E3': 'Quantitative Imaging Corporation', '0006E4': 'Citel Technologies Ltd.', '0006EB': 'Global Data', '0006FF': 'Sheba Systems Co., Ltd.', '0006C7': 'RFNET Technologies Pte Ltd (S)', '000709': 'Westerstrand Urfabrik AB', '000663': 'Human Technology Co., Ltd.', '000665': 'Sunny Giken, Inc.', '000669': 'Datasound Laboratories Ltd', '00066E': 'Delta Electronics, Inc.', '000662': 'MBM Technology Ltd.', '000692': 'Intruvert Networks, Inc.', '00068B': 'AirRunner Technologies, Inc.', '000688': 'Telways Communication Co., Ltd.', '000687': 'Omnitron Systems Technology, Inc.', '000694': 'Mobillian Corporation', '000695': 'Ensure Technologies, Inc.', '000691': 'PT Inovacao', '0006A1': 'Celsian Technologies, Inc.', '0006AB': 'W-Link Systems, Inc.', '0006AC': 'Intersoft Co.', '00065A': 'Strix Systems', '000652': 'Cisco Systems, Inc', '000656': 'Tactel AB', '000641': 'ITCN', '000648': 'Seedsware, Inc.', '000674': 'Spectrum Control, Inc.', '0005C5': 'Flaga HF', '0005CA': 'Hitron Technology, Inc.', '0005D2': 'DAP Technologies', '0005C0': 'Digital Network Alacarte Co., Ltd.', '0005B8': 'Electronic Design Associates, Inc.', '00060F': 'Narad Networks Inc', '000610': 'Abeona Networks Inc', '000611': 'Zeus Wireless, Inc.', '0005EC': 'Mosaic Systems Inc.', '0005D1': 'Metavector Technologies', '0005C9': 'LG Innotek Co., Ltd.', '0005D5': 'Speedcom Wireless', '00061A': 'Zetari Inc.', '00061F': 'Vision Components GmbH', '00061B': 'Notebook Development Lab. Lenovo Japan Ltd.', '00064C': 'Invicta Networks, Inc.', '000638': 'Sungjin C&C Co., Ltd.', '0005FB': 'ShareGate, Inc.', '0005FE': 'Traficon N.V.', '000635': 'PacketAir Networks, Inc.', '000602': 'Cirkitech Electronics Co.', '0005F0': 'SATEC', '000581': 'Snell', '00057D': 'Sun Communications, Inc.', '000586': 'Lucent Technologies', '00057B': 'Chung Nam Electronic Co., Ltd.', '000599': 'DRS Test and Energy Management or DRS-TEM', '00059A': 'Cisco Systems, Inc', '0005AB': 'Cyber Fone, Inc.', '0005BA': 'Area Netwoeks, Inc.', '0005BF': 'JustEzy Technology, Inc.', '0005AC': 'Northern Digital, Inc.', '00058B': 'IPmental, Inc.', '000571': 'Seiwa Electronics Co.', '000577': 'SM Information & Communication', '000570': 'Baydel Ltd.', '0005AD': 'Topspin Communications, Inc.', '0005B1': 'ASB Technology BV', '000592': 'Pultek Corp.', '00056E': 'National Enhance Technology, Inc.', '00056D': 'Pacific Corporation', '00055C': 'Kowa Company, Ltd.', '000513': 'VTLinx Multimedia Systems, Inc.', '00050E': '3ware, Inc.', '000510': 'Infinite Shanghai Communication Terminals Ltd.', '0004FF': 'Acronet Co., Ltd.', '000501': 'Cisco Systems, Inc', '000508': 'Inetcam, Inc.', '000504': 'Naray Information & Communication Enterprise', '000509': 'AVOC Nishimura Ltd.', '0004FB': 'Commtech, Inc.', '000515': 'Nuark Co., Ltd.', '000516': 'SMART Modular Technologies', '00051B': 'Magic Control Technology Corporation', '0004DD': 'Cisco Systems, Inc', '008086': 'COMPUTER GENERATION INC.', '0004D7': 'Omitec Instrumentation Ltd.', '0004E9': 'Infiniswitch Corporation', '0004E8': 'IER, Inc.', '0004DB': 'Tellus Group Corp.', '0004E0': 'Procket Networks', '000543': 'IQ Wireless GmbH', '00052B': 'HORIBA, Ltd.', '00051D': 'Airocon, Inc.', '000549': 'Salira Optical Network Systems', '00054C': 'RF Innovations Pty Ltd', 
'000530': 'Andiamo Systems, Inc.', '000538': 'Merilus, Inc.', '00048C': 'Nayna Networks, Inc.', '000491': 'Technovision, Inc.', '000493': 'Tsinghua Unisplendour Co., Ltd.', '000494': 'Breezecom, Ltd.', '000489': 'YAFO Networks, Inc.', '00048A': 'Temia Vertriebs GmbH', '000481': 'Econolite Control Products, Inc.', '000473': 'Photonex Corporation', '000470': 'ipUnplugged AB', '00046C': 'Cyber Technology Co., Ltd.', '000471': 'IPrad', '00046E': 'Cisco Systems, Inc', '0004D4': 'Proview Electronics Co., Ltd.', '0004CD': 'Extenway Solutions Inc', '0004BA': 'KDD Media Will Corporation', '0004B6': 'Stratex Networks, Inc.', '0004B3': 'Videotek, Inc.', '000474': 'LEGRAND', '00045D': 'BEKA Elektronik', '000459': 'Veristar Corporation', '000492': 'Hive Internet, Ltd.', '000477': 'Scalant Systems, Inc.', '0004C0': 'Cisco Systems, Inc', '0004A4': 'NetEnabled, Inc.', '000433': 'Cyberboard A/S', '000434': 'Accelent Systems, Inc.', '00042D': 'Sarian Systems, Ltd.', '00042E': 'Netous Technologies, Ltd.', '000439': 'Rosco Entertainment Technology, Inc.', '00043A': 'Intelligent Telecommunications, Inc.', '00043F': 'ESTeem Wireless Modems, Inc', '00040A': 'Sage Systems', '000404': 'Makino Milling Machine Co., Ltd.', '000405': 'ACN Technologies', '0003F0': 'Redfern Broadband Networks', '0003EB': 'Atrica', '0003E7': 'Logostek Co. Ltd.', '000401': 'Osaki Electric Co., Ltd.', '0003F4': 'NetBurner', '0003F2': 'Seneca Networks', '000416': 'Parks S/A Comunicacoes Digitais', '00040F': 'Asus Network Technologies, Inc.', '0003E3': 'Cisco Systems, Inc', '0003DB': 'Apogee Electronics Corp.', '000425': 'Atmel Corporation', '00041B': 'Bridgeworks Ltd.', '00044C': 'JENOPTIK', '000444': 'Western Multiplex Corporation', '0003A7': 'Unixtar Technology, Inc.', '0003AE': 'Allied Advanced Manufacturing Pte, Ltd.', '0003A3': 'MAVIX, Ltd.', '0003A1': 'HIPER Information & Communication, Inc.', '000396': 'EZ Cast Co., Ltd.', '00036E': 'Nicon Systems (Pty) Limited', '000371': 'Acomz Networks Corp.', '000363': 'Miraesys Co., Ltd.', '00035F': 'Prüftechnik Condition Monitoring GmbH & Co. 
KG', '000360': 'PAC Interactive Technology, Inc.', '00038A': 'America Online, Inc.', '00038D': 'PCS Revenue Control Systems, Inc.', '000388': 'Fastfame Technology Co., Ltd.', '0003B6': 'QSI Corporation', '0003B1': 'Hospira Inc.', '0003B3': 'IA Link Systems Co., Ltd.', '0003AD': 'Emerson Energy Systems AB', '000378': 'HUMAX Co., Ltd.', '000374': 'Control Microsystems', '000376': 'Graphtec Technology, Inc.', '00036D': 'Runtop, Inc.', '0003D2': 'Crossbeam Systems, Inc.', '0003D0': 'KOANKEISO Co., Ltd.', '00039A': 'SiConnect', '000394': 'Connect One', '0003CF': 'Muxcom, Inc.', '0003D1': 'Takaya Corporation', '00037E': 'PORTech Communications, Inc.', '00031F': 'Condev Ltd.', '00029F': 'L-3 Communication Aviation Recorders', '00031B': 'Cellvision Systems, Inc.', '00031C': 'Svenska Hardvarufabriken AB', '0001A8': 'Welltech Computer Co., Ltd.', '000317': 'Merlin Systems, Inc.', '000318': 'Cyras Systems, Inc.', '000307': 'Secure Works, Inc.', '000306': 'Fusion In Tech Co., Ltd.', '0002FC': 'Cisco Systems, Inc', '0002FA': 'DX Antenna Co., Ltd.', '000361': 'Widcomm, Inc.', '000359': 'DigitalSis', '000352': 'Colubris Networks', '00030C': 'Telesoft Technologies Ltd.', '000308': 'AM Communications, Inc.', '0002FB': 'Baumuller Aulugen-Systemtechnik GmbH', '0002F6': 'Equipe Communications', '00034C': 'Shanghai DigiVision Technology Co., Ltd.', '000349': 'Vidicode Datacommunicatie B.V.', '000324': 'SANYO Consumer Electronics Co., Ltd.', '000325': 'Arima Computer Corp.', '00033C': 'Daiden Co., Ltd.', '00032A': 'UniData Communication Systems, Inc.', '00033E': 'Tateyama System Laboratory Co., Ltd.', '000280': 'Mu Net, Inc.', '009064': 'Thomson Inc.', '00027A': 'IOI Technology Corporation', '000289': 'DNE Technologies', '000283': 'Spectrum Controls, Inc.', '000284': 'UK Grid Solutions Limited', '00029B': 'Kreatel Communications AB', '000295': 'IP.Access Limited', '000293': 'Solid Data Systems', '0002CB': 'TriState Ltd.', '0002CA': 'EndPoints, Inc.', '0002C4': 'Vector International BVBA', '000274': 'Tommy Technologies Corp.', '000272': 'CC&C Technologies, Inc.', '0002A7': 'Vivace Networks', '00029E': 'Information Equipment Co., Ltd.', '0002E3': 'LITE-ON Communications, Inc.', '0002DD': 'Bromax Communications, Ltd.', '0002BF': 'dotRocket, Inc.', '000223': 'ClickTV', '00020A': 'Gefran Spa', '000206': 'Telital R&D Denmark A/S', '000209': 'Shenzhen SED Information Technology Co., Ltd.', '000202': 'Amino Communications, Ltd.', '000201': 'IFM Electronic gmbh', '00026D': 'Adept Telecom', '00026B': 'BCM Computers Co., Ltd.', '000266': 'Thermalogic Corporation', '000268': 'Harris Government Communications', '00025E': 'High Technology Ltd', '00012F': 'Twinhead International Corp', '000247': 'Great Dragon Information Technology (Group) Co., Ltd.', '000243': 'Raysis Co., Ltd.', '000231': 'Ingersoll-Rand', '000234': 'Imperial Technology, Inc.', '000236': 'INIT GmbH', '00021F': 'Aculab PLC', '00021A': 'Zuma Networks', '000222': 'Chromisys, Inc.', '00022B': 'SAXA, Inc.', '000224': 'C-COR', '0001EF': 'Camtel Technology Corp.', '000260': 'Accordion Networks, Inc.', '000213': 'S.D.E.L.', '0001CA': 'Geocast Network Systems, Inc.', '0001D1': 'CoNet Communications, Inc.', '0001B2': 'Digital Processing Systems, Inc.', '0001B8': 'Netsensity, Inc.', '0001B9': 'SKF (U.K.) 
Limited', '0001B3': 'Precision Electronic Manufacturing', '0001A4': 'Microlink Corporation', '00018E': 'Logitec Corporation', '000179': 'WIRELESS TECHNOLOGY, INC.', '000160': 'ELMEX Co., LTD.', '00014E': 'WIN Enterprises, Inc.', '003073': 'International Microsystems, In', '00303F': 'TurboComm Tech Inc.', '0001BD': 'Peterson Electro-Musical Products, Inc.', '0001B7': 'Centos, Inc.', '0001B5': 'Turin Networks, Inc.', '00016D': 'CarrierComm Inc.', '00016F': 'Inkel Corp.', '000170': "ESE Embedded System Engineer'g", '00016A': 'ALITEC', '000165': 'AirSwitch Corporation', '00017E': 'ADTEK System Science Co., Ltd.', '000183': 'ANITE TELECOMS', '0001DB': 'Freecom Technologies GmbH', '0001DF': 'ISDN Communications, Ltd.', '000184': 'SIEB & MEYER AG', '000195': 'Sena Technologies, Inc.', '000107': 'Leiser GmbH', '000114': 'KANDA TSUSHIN KOGYO CO., LTD.', '00010B': 'Space CyberLink, Inc.', '00B0DB': 'Nextcell, Inc.', '00B0AE': 'Symmetricom', '00B0E7': 'British Federal Ltd.', '00B08E': 'Cisco Systems, Inc', '000156': 'FIREWIREDIRECT.COM, INC.', '000153': 'ARCHTEK TELECOM CORPORATION', '000135': 'KDC Corp.', '000128': 'EnjoyWeb, Inc.', '00010C': 'System Talks Inc.', '000111': 'iDigm Inc.', '000141': 'CABLE PRINT', '000146': 'Tesco Controls, Inc.', '000149': 'TDT AG', '000118': 'EZ Digital Co., Ltd.', '00011C': 'Universal Talkware Corporation', '00B0EC': 'EACEM', '00B0DF': 'Starboard Storage Systems', '00B0F5': 'NetWorth Technologies, Inc.', '000131': 'Bosch Security Systems, Inc.', '003022': 'Fong Kai Industrial Co., Ltd.', '003070': '1Net Corporation', '0030F8': 'Dynapro Systems, Inc.', '0030B7': 'Teletrol Systems, Inc.', '0030B3': 'San Valley Systems, Inc.', '003009': 'Tachion Networks, Inc.', '00307A': 'Advanced Technology & Systems', '003061': 'MobyTEL', '00308F': 'MICRILOR, Inc.', '00309C': 'Timing Applications, Inc.', '00307E': 'Redflex Communication Systems', '00304F': 'PLANET Technology Corporation', '003004': 'LEADTEK RESEARCH INC.', '0030F9': 'Sollae Systems Co., Ltd.', '003002': 'Expand Networks', '003078': 'Cisco Systems, Inc', '0030B1': 'TrunkNet', '0030E0': 'OXFORD SEMICONDUCTOR LTD.', '003064': 'ADLINK TECHNOLOGY, INC.', '003050': 'Versa Technology', '0030C0': 'Lara Technology, Inc.', '00300B': 'mPHASE Technologies, Inc.', '0030BE': 'City-Net Technology, Inc.', '00305C': 'SMAR Laboratories Corp.', '003005': 'Fujitsu Siemens Computers', '0030C7': 'Macromate Corp.', '0030E4': 'CHIYODA SYSTEM RIKEN', '003066': 'RFM', '003031': 'LIGHTWAVE COMMUNICATIONS, INC.', '003060': 'Powerfile, Inc.', '0030A0': 'TYCO SUBMARINE SYSTEMS, LTD.', '003015': 'CP CLARE CORP.', '003000': 'ALLWELL TECHNOLOGY CORP.', '003011': 'HMS Industrial Networks', '0030FA': 'TELICA, INC.', '00304B': 'ORBACOM SYSTEMS, INC.', '0030E9': "GMA COMMUNICATION MANUFACT'G", '0030A5': 'ACTIVE POWER', '003084': 'ALLIED TELESYN INTERNAIONAL', '003029': 'OPICOM', '003083': 'Ivron Systems', '0030B6': 'Cisco Systems, Inc', '0030E7': 'CNF MOBILE SOLUTIONS, INC.', '0030B4': 'INTERSIL CORP.', '003043': 'IDREAM TECHNOLOGIES, PTE. LTD.', '00301F': 'OPTICAL NETWORKS, INC.', '003068': 'CYBERNETICS TECH. CO., LTD.', '003091': 'TAIWAN FIRST LINE ELEC. CORP.', '003054': 'CASTLENET TECHNOLOGY, INC.', '0030DB': 'Mindready Solutions, Inc.', '00D0F7': 'NEXT NETS CORPORATION', '00D003': 'COMDA ENTERPRISES CORP.', '00D0D2': 'EPILOG CORPORATION', '00D03E': 'ROCKETCHIPS, INC.', '00D093': 'TQ - COMPONENTS GMBH', '00D03F': 'AMERICAN COMMUNICATION', '0030CD': 'CONEXANT SYSTEMS, INC.', '00301A': 'SMARTBRIDGES PTE. 
LTD.', '00305B': 'Toko Inc.', '00D025': 'XROSSTECH, INC.', '00D044': 'ALIDIAN NETWORKS, INC.', '00D018': 'QWES. COM, INC.', '00D0FF': 'Cisco Systems, Inc', '00D028': 'Harmonic, Inc', '00D0F9': 'ACUTE COMMUNICATIONS CORP.', '00D0CE': 'iSystem Labs ', '00D077': 'LUCENT TECHNOLOGIES', '00D007': 'MIC ASSOCIATES, INC.', '00D06F': 'KMC CONTROLS', '00D027': 'APPLIED AUTOMATION, INC.', '00D0A6': 'LANBIRD TECHNOLOGY CO., LTD.', '00D049': 'IMPRESSTEK CO., LTD.', '00D05B': 'ACROLOOP MOTION CONTROL', '00D042': 'MAHLO GMBH & CO. UG', '00D0DD': 'SUNRISE TELECOM, INC.', '00D0E6': 'IBOND INC.', '00D0D1': 'Sycamore Networks', '00D087': 'MICROFIRST INC.', '00D031': 'INDUSTRIAL LOGIC CORPORATION', '00D038': 'FIVEMERE, LTD.', '00D0C6': 'THOMAS & BETTS CORP.', '00D089': 'DYNACOLOR, INC.', '00D072': 'BROADLOGIC', '00D0E2': 'MRT MICRO, INC.', '00D0FC': 'GRANITE MICROSYSTEMS', '00D01A': 'URMET TLC S.P.A.', '00D06A': 'LINKUP SYSTEMS CORPORATION', '0001A7': 'UNEX TECHNOLOGY CORPORATION', '00D0A9': 'SHINANO KENSHI CO., LTD.', '00503C': 'TSINGHUA NOVEL ELECTRONICS', '005060': 'TANDBERG TELECOM AS', '0050EE': 'TEK DIGITEL CORPORATION', '0050FF': 'HAKKO ELECTRONICS CO., LTD.', '0050D2': 'CMC Electronics Inc', '0050F9': 'Sensormatic Electronics LLC', '00D040': 'SYSMATE CO., LTD.', '00D08C': 'GENOA TECHNOLOGY, INC.', '00D059': 'AMBIT MICROSYSTEMS CORP.', '00D0FD': 'OPTIMA TELE.COM, INC.', '00D080': 'EXABYTE CORPORATION', '00D091': 'SMARTSAN SYSTEMS, INC.', '00D04E': 'LOGIBAG', '00D02C': 'CAMPBELL SCIENTIFIC, INC.', '00D0CC': 'TECHNOLOGIES LYRE INC.', '005033': 'MAYAN NETWORKS', '005045': 'RIOWORKS SOLUTIONS, INC.', '00502B': 'GENRAD LTD.', '00502E': 'CAMBEX CORPORATION', '00506E': 'CORDER ENGINEERING CORPORATION', '00502C': 'SOYO COMPUTER, INC.', '0050E6': 'HAKUSAN CORPORATION', '00D0CD': 'ATAN TECHNOLOGY INC.', '005077': 'PROLIFIC TECHNOLOGY, INC.', '005042': 'SCI MANUFACTURING SINGAPORE PTE, LTD.', '0050C0': 'GATAN, INC.', '005051': 'IWATSU ELECTRIC CO., LTD.', '0050BB': 'CMS TECHNOLOGIES', '005062': 'KOUWELL ELECTRONICS CORP. **', '0050D5': 'AD SYSTEMS CORP.', '005048': 'INFOLIBRIA', '00504E': 'SIERRA MONITOR CORP.', '0050F6': 'PAN-INTERNATIONAL INDUSTRIAL CORP.', '00506C': 'Beijer Electronics Products AB', '005022': 'ZONET TECHNOLOGY, INC.', '005007': 'SIEMENS TELECOMMUNICATION SYSTEMS LIMITED', '005040': 'Panasonic Electric Works Co., Ltd.', '0050F8': 'ENTREGA TECHNOLOGIES, INC.', '005068': 'ELECTRONIC INDUSTRIES ASSOCIATION', '00501C': 'JATOM SYSTEMS, INC.', '005092': 'Rigaku Corporation Osaka Plant', '00507A': 'XPEED, INC.', '0050F3': 'GLOBAL NET INFORMATION CO., Ltd.', '0050BE': 'FAST MULTIMEDIA AG', '00506F': 'G-CONNECT', '00507D': 'IFP', '005097': 'MMC-EMBEDDED COMPUTERTECHNIK GmbH', '0050D7': 'TELSTRAT', '005044': 'ASACA CORPORATION', '00500E': 'CHROMATIS NETWORKS, INC.', '005010': 'NovaNET Learning, Inc.', '00509A': 'TAG ELECTRONIC SYSTEMS', '0050EC': 'OLICOM A/S', '0050C9': 'MASPRO DENKOH CORP.', '005069': 'PixStream Incorporated', '00504F': 'OLENCOM ELECTRONICS', '0050EB': 'ALPHA-TOP CORPORATION', '0050EF': 'SPE Systemhaus GmbH', '005098': 'GLOBALOOP, LTD.', '0050BC': 'HAMMER STORAGE SOLUTIONS', '009071': 'Applied Innovation Inc.', '009016': 'ZAC', '0090FF': 'TELLUS TECHNOLOGY INC.', '00903E': 'N.V. 
PHILIPS INDUSTRIAL ACTIVITIES', '0090BA': 'VALID NETWORKS, INC.', '009018': 'ITO ELECTRIC INDUSTRY CO, LTD.', '0090CD': 'ENT-EMPRESA NACIONAL DE TELECOMMUNICACOES, S.A.', '0090D0': 'Thomson Telecom Belgium', '009094': 'OSPREY TECHNOLOGIES, INC.', '0090B3': 'AGRANAT SYSTEMS', '00905D': 'NETCOM SICHERHEITSTECHNIK GMBH', '0090DA': 'DYNARC, INC.', '0090E0': 'SYSTRAN CORP.', '009022': 'IVEX', '0090D1': 'LEICHU ENTERPRISE CO., LTD.', '009046': 'DEXDYNE, LTD.', '009053': 'DAEWOO ELECTRONICS CO., LTD.', '00909B': 'MARKEM-IMAJE', '0090FB': 'PORTWELL, INC.', '00900F': 'KAWASAKI HEAVY INDUSTRIES, LTD', '0090EA': 'ALPHA TECHNOLOGIES, INC.', '009077': 'ADVANCED FIBRE COMMUNICATIONS', '009099': 'ALLIED TELESIS, K.K.', '009055': 'PARKER HANNIFIN CORPORATION COMPUMOTOR DIVISION', '009060': 'SYSTEM CREATE CORP.', '009013': 'SAMSAN CORP.', '0090DC': 'TECO INFORMATION SYSTEMS', '0090E2': 'DISTRIBUTED PROCESSING TECHNOLOGY', '009085': 'GOLDEN ENTERPRISES, INC.', '0010A7': 'UNEX TECHNOLOGY CORPORATION', '0010D5': 'IMASDE CANARIAS, S.A.', '001055': 'FUJITSU MICROELECTRONICS, INC.', '001052': 'METTLER-TOLEDO (ALBSTADT) GMBH', '0090C7': 'ICOM INC.', '009035': 'ALPHA TELECOM, INC.', '00907A': 'Spectralink, Inc', '0090F0': 'Harmonic Video Systems Ltd.', '0090E3': 'AVEX ELECTRONICS INC.', '0090A9': 'WESTERN DIGITAL', '0090F3': 'ASPECT COMMUNICATIONS', '009020': 'PHILIPS ANALYTICAL X-RAY B.V.', '0010A3': 'OMNITRONIX, INC.', '0010AD': 'SOFTRONICS USB, INC.', '00903C': 'ATLANTIC NETWORK SYSTEMS', '0090A7': 'CLIENTEC CORPORATION', '00905C': 'EDMI', '00904F': 'ABB POWER T&D COMPANY, INC.', '00104F': 'Oracle Corporation ', '001012': 'PROCESSOR SYSTEMS (I) PVT LTD', '00106B': 'SONUS NETWORKS, INC.', '0010C3': 'CSI-CONTROL SYSTEMS', '001078': 'NUERA COMMUNICATIONS, INC.', '0010DD': 'ENABLE SEMICONDUCTOR, INC.', '0010B9': 'MAXTOR CORP.', '001015': 'OOmon Inc.', '00105D': 'Draeger Medical', '001091': 'NO WIRES NEEDED BV', '00102D': 'HITACHI SOFTWARE ENGINEERING', '001033': 'ACCESSLAN COMMUNICATIONS, INC.', '0010F4': 'Vertical Communications', '001077': 'SAF DRIVE SYSTEMS, LTD.', '0010B3': 'NOKIA MULTIMEDIA TERMINALS', '001037': "CYQ've Technology Co., Ltd.", '00101B': 'CORNET TECHNOLOGY, INC.', '0010DC': 'MICRO-STAR INTERNATIONAL CO., LTD.', '00100A': 'WILLIAMS COMMUNICATIONS GROUP', '001032': 'ALTA TECHNOLOGY', '001080': 'METAWAVE COMMUNICATIONS', '00101E': 'MATSUSHITA ELECTRONIC INSTRUMENTS CORP.', '00104D': 'SURTEC INDUSTRIES, INC.', '00107A': 'AmbiCom, Inc.', '001051': 'CMICRO CORPORATION', '00E07F': 'LOGISTISTEM s.r.l.', '00E013': 'EASTERN ELECTRONIC CO., LTD.', '00E0FD': 'A-TREND TECHNOLOGY CO., LTD.', '00E0BD': 'INTERFACE SYSTEMS, INC.', '00E06D': 'COMPUWARE CORPORATION', '00E0BB': 'NBX CORPORATION', '00E08A': 'GEC AVERY, LTD.', '00E086': 'Emerson Network Power, Avocent Division', '00E06E': 'FAR SYSTEMS S.p.A.', '00E016': 'RAPID CITY COMMUNICATIONS', '00E0EA': 'INNOVAT COMMUNICATIONS, INC.', '00E064': 'SAMSUNG ELECTRONICS', '00E0C9': 'AutomatedLogic Corporation', '00E0C5': 'BCOM ELECTRONICS INC.', '00E0EE': 'MAREL HF', '00E08E': 'UTSTARCOM', '00E03F': 'JATON CORPORATION', '00E01B': 'SPHERE COMMUNICATIONS, INC.', '00E059': 'CONTROLLED ENVIRONMENTS, LTD.', '00E0A5': 'ComCore Semiconductor, Inc.', '00E015': 'HEIWA CORPORATION', '00E069': 'JAYCOR', '00E0A4': 'ESAOTE S.p.A.', '00E0C1': 'MEMOREX TELEX JAPAN, LTD.', '00E084': 'COMPULITE R&D', '00E0D4': 'EXCELLENT COMPUTER', '00E005': 'TECHNICAL CORP.', '00E0DE': 'DATAX NV', '00E0E8': 'GRETACODER Data Systems AG', '00E0A9': 'FUNAI ELECTRIC CO., LTD.', '0060A5': 'PERFORMANCE 
TELECOM CORP.', '0060A1': 'VPNet, Inc.', '006027': 'Superior Modular Products', '0060BC': 'KeunYoung Electronics & Communication Co., Ltd.', '00607F': 'AURORA TECHNOLOGIES, INC.', '00E029': 'STANDARD MICROSYSTEMS CORP.', '00E0E9': 'DATA LABS, INC.', '00E00C': 'MOTOROLA', '00E00A': 'DIBA, INC.', '00E0C4': 'HORNER ELECTRIC, INC.', '0060B6': 'LAND COMPUTER CO., LTD.', '006074': 'QSC LLC', '006076': 'SCHLUMBERGER TECHNOLOGIES RETAIL PETROLEUM SYSTEMS', '006005': 'FEEDBACK DATA LTD.', '00602E': 'CYCLADES CORPORATION', '00E038': 'PROXIMA CORPORATION', '00E09C': 'MII', '00E096': 'SHIMADZU CORPORATION', '00E017': 'EXXACT GmbH', '006061': 'WHISTLE COMMUNICATIONS CORP.', '006064': 'NETCOMM LIMITED', '0060C5': 'ANCOT CORP.', '0060A9': 'GESYTEC MBH', '0060F2': 'LASERGRAPHICS, INC.', '0060C3': 'NETVISION CORPORATION', '0060EA': 'StreamLogic', '006029': 'CARY PERIPHERALS INC.', '0060A8': 'TIDOMAT AB', '0060FC': 'CONSERVATION THROUGH INNOVATION LTD.', '006018': 'STELLAR ONE CORPORATION', '00601F': 'STALLION TECHNOLOGIES', '00600A': 'SORD COMPUTER CORPORATION', '0060A4': 'GEW Technologies (PTY)Ltd', '0060E3': 'ARBIN INSTRUMENTS', '006071': 'MIDAS LAB, INC.', '00606C': 'ARESCOM', '00601B': 'MESA ELECTRONICS', '0060F9': 'DIAMOND LANE COMMUNICATIONS', '0060D4': 'ELDAT COMMUNICATION LTD.', '0060D9': 'TRANSYS NETWORKS INC.', '00A07A': 'ADVANCED PERIPHERALS TECHNOLOGIES, INC.', '00A04E': 'VOELKER TECHNOLOGIES, INC.', '00A05A': 'KOFAX IMAGE PRODUCTS', '00A052': 'STANILITE ELECTRONICS PTY. LTD', '00A05E': 'MYRIAD LOGIC INC.', '00A095': 'ACACIA NETWORKS, INC.', '00609B': 'AstroNova, Inc', '0060DB': 'NTP ELEKTRONIK A/S', '006052': 'PERIPHERALS ENTERPRISE CO., Ltd.', '00A0DF': 'STS TECHNOLOGIES, INC.', '00A094': 'COMSAT CORPORATION', '00A0EF': 'LUCIDATA LTD.', '00A005': 'DANIEL INSTRUMENTS, LTD.', '00A053': 'COMPACT DEVICES, INC.', '00A069': 'Symmetricom, Inc.', '0060B2': 'PROCESS CONTROL CORP.', '006081': 'TV/COM INTERNATIONAL', '006082': 'NOVALINK TECHNOLOGIES, INC.', '0060E7': 'RANDATA', '006054': 'CONTROLWARE GMBH', '0060C2': 'MPL AG', '00A0F2': 'INFOTEK COMMUNICATIONS, INC.', '00A0CE': 'Ecessa', '00A099': 'K-NET LTD.', '00A0EC': 'TRANSMITTON LTD.', '00A067': 'NETWORK SERVICES GROUP', '00A0E0': 'TENNYSON TECHNOLOGIES PTY LTD', '00A028': 'CONNER PERIPHERALS', '00A09E': 'ICTV', '00A020': 'CITICORP/TTI', '00A0FD': 'SCITEX DIGITAL PRINTING, INC.', '00A00F': 'Broadband Technologies', '00A002': 'LEEDS & NORTHRUP AUSTRALIA PTY LTD', '00A0E4': 'OPTIQUEST', '00A0EE': 'NASHOBA NETWORKS', '00A080': 'Tattile SRL ', '00A02B': 'TRANSITIONS RESEARCH CORP.', '00A0E8': 'REUTERS HOLDINGS PLC', '00A008': 'NETCORP', '00A050': 'CYPRESS SEMICONDUCTOR', '00A0DD': 'AZONIX CORPORATION', '00A066': 'ISA CO., LTD.', '00A0AB': 'NETCS INFORMATIONSTECHNIK GMBH', '00A0D8': 'SPECTRA - TEK', '00A0FA': 'Marconi Communication GmbH', '00A0CB': 'ARK TELECOMMUNICATIONS, INC.', '00A017': 'J B M CORPORATION', '00A025': 'REDCOM LABS INC.', '00A0BB': 'HILAN GMBH', '00A091': 'APPLICOM INTERNATIONAL', '00A0A5': 'TEKNOR MICROSYSTEME, INC.', '0020B7': 'NAMAQUA COMPUTERWARE', '0020E3': 'MCD KENCOM CORPORATION', '002013': 'DIVERSIFIED TECHNOLOGY, INC.', '0020AB': 'MICRO INDUSTRIES CORP.', '00A0C3': 'UNICOMPUTER GMBH', '00A00A': 'Airspan', '00A0E7': 'CENTRAL DATA CORPORATION', '00A075': 'MICRON TECHNOLOGY, INC.', '00A009': 'WHITETREE NETWORK', '00A00C': 'KINGMAX TECHNOLOGY, INC.', '00A0A2': 'DIGICOM S.P.A.', '00A034': 'AXEL', '00A001': 'DRS Signal Solutions', '00A054': 'Private', '0020B2': 'GKD Gesellschaft Fur Kommunikation Und Datentechnik', '002052': 'RAGULA 
SYSTEMS', '0020FC': 'MATROX', '0020FE': 'TOPWARE INC. / GRAND COMPUTER', '002073': 'FUSION SYSTEMS CORPORATION', '002035': 'IBM Corp', '00205A': 'COMPUTER IDENTICS', '002000': 'LEXMARK INTERNATIONAL, INC.', '00201D': 'KATANA PRODUCTS', '002003': 'PIXEL POWER LTD.', '002046': 'CIPRICO, INC.', '00207A': 'WiSE Communications, Inc.', '00203E': 'LogiCan Technologies, Inc.', '002058': 'ALLIED SIGNAL INC.', '00208D': 'CMD TECHNOLOGY', '0020DD': 'Cybertec Pty Ltd', '0020E6': 'LIDKOPING MACHINE TOOLS AB', '002026': 'AMKLY SYSTEMS, INC.', '002065': 'SUPERNET NETWORKING INC.', '002019': 'OHLER GMBH', '00209E': "BROWN'S OPERATING SYSTEM SERVICES, LTD.", '002086': 'MICROTECH ELECTRONICS LIMITED', '002023': 'T.C. TECHNOLOGIES PTY. LTD', '00208E': 'CHEVIN SOFTWARE ENG. LTD.', '002097': 'APPLIED SIGNAL TECHNOLOGY', '00209B': 'ERSAT ELECTRONIC GMBH', '00201C': 'EXCEL, INC.', '00207F': 'KYOEI SANGYO CO., LTD.', '0020C9': 'VICTRON BV', '002077': 'KARDIOS SYSTEMS CORP.', '0020E1': 'ALAMAR ELECTRONICS', '00C00B': 'NORCONTROL A.S.', '0020B0': 'GATEWAY DEVICES, INC.', '00205B': 'Kentrox, LLC', '00202A': 'N.V. DZINE', '002083': 'PRESTICOM INCORPORATED', '0020D3': 'OST (OUEST STANDARD TELEMATIQU', '0020F6': 'NET TEK AND KARLNET, INC.', '002008': 'CABLE & COMPUTER TECHNOLOGY', '0020C6': 'NECTEC', '00C00E': 'PSITECH, INC.', '00C031': 'DESIGN RESEARCH SYSTEMS, INC.', '000701': 'RACAL-DATACOM', '00C09C': 'HIOKI E.E. CORPORATION', '00C011': 'INTERACTIVE COMPUTING DEVICES', '00C03E': 'FA. GEBR. HELLER GMBH', '00C0FD': 'PROSUM', '00C014': "TELEMATICS CALABASAS INT'L,INC", '00AA3C': 'OLIVETTI TELECOM SPA (OLTECO)', '00C0C9': 'ELSAG BAILEY PROCESS', '00C048': 'BAY TECHNICAL ASSOCIATES', '00C076': 'I-DATA INTERNATIONAL A-S', '00C046': 'Blue Chip Technology Ltd', '00C060': 'ID SCANDINAVIA AS', '00C0E9': 'OAK SOLUTIONS, LTD.', '00C0C5': 'SID INFORMATICA', '00C051': 'ADVANCED INTEGRATION RESEARCH', '00C085': 'ELECTRONICS FOR IMAGING, INC.', '00C0B2': 'NORAND CORPORATION', '00C054': 'NETWORK PERIPHERALS, LTD.', '00C022': 'LASERMASTER TECHNOLOGIES, INC.', '00C025': 'DATAPRODUCTS CORPORATION', '00C0DF': 'KYE Systems Corp.', '0040CF': 'STRAWBERRY TREE, INC.', '004077': 'MAXTON TECHNOLOGY CORPORATION', '0040E7': 'ARNOS INSTRUMENTS & COMPUTER', '004087': 'UBITREX CORPORATION', '004007': 'TELMAT INFORMATIQUE', '00407B': 'SCIENTIFIC ATLANTA', '00402C': 'ISIS DISTRIBUTED SYSTEMS, INC.', '0040CC': "SILCOM MANUF'G TECHNOLOGY INC.", '00C0F5': 'METACOMP, INC.', '00C091': 'JABIL CIRCUIT, INC.', '00C049': 'U.S. 
ROBOTICS, INC.', '00C09D': "DISTRIBUTED SYSTEMS INT'L, INC", '00C0AA': 'SILICON VALLEY COMPUTER', '00C066': 'DOCUPOINT, INC.', '00C02D': 'FUJI PHOTO FILM CO., LTD.', '00C0F2': 'TRANSITION NETWORKS', '00C0BD': 'INEX TECHNOLOGIES, INC.', '00C088': 'EKF ELEKTRONIK GMBH', '00C097': 'ARCHIPEL SA', '00C004': 'JAPAN BUSINESS COMPUTER CO.LTD', '00C0FB': 'ADVANCED TECHNOLOGY LABS', '00C09E': 'CACHE COMPUTERS, INC.', '00C0AC': 'GAMBIT COMPUTER COMMUNICATIONS', '00C034': 'TRANSACTION NETWORK', '00C093': 'ALTA RESEARCH CORP.', '0040B8': 'IDEA ASSOCIATES', '0040E8': 'CHARLES RIVER DATA SYSTEMS,INC', '0040C0': 'VISTA CONTROLS CORPORATION', '00404D': 'TELECOMMUNICATIONS TECHNIQUES', '00400D': 'LANNET DATA COMMUNICATIONS,LTD', '00C02C': 'CENTRUM COMMUNICATIONS, INC.', '00C02B': 'GERLOFF GESELLSCHAFT FUR', '004019': 'AEON SYSTEMS, INC.', '0040BE': 'BOEING DEFENSE & SPACE', '00406E': 'COROLLARY, INC.', '004076': 'Sun Conversion Technologies', '004022': 'KLEVER COMPUTERS, INC.', '004074': 'CABLE AND WIRELESS', '004073': 'BASS ASSOCIATES', '00407D': 'EXTENSION TECHNOLOGY CORP.', '00405B': 'FUNASSET LIMITED', '00C0A0': 'ADVANCE MICRO RESEARCH, INC.', '00C010': 'HIRAKAWA HEWTECH CORP.', '00C037': 'DYNATEM', '004083': 'TDA INDUSTRIA DE PRODUTOS', '004008': 'A PLUS INFO CORPORATION', '0040B5': 'VIDEO TECHNOLOGY COMPUTERS LTD', '004012': 'WINDATA, INC.', '0040D5': 'Sartorius Mechatronics T&H GmbH ', '0040BF': "CHANNEL SYSTEMS INTERN'L INC.", '00401E': 'ICC', '00409A': 'NETWORK EXPRESS, INC.', '004094': 'SHOGRAPHICS, INC.', '004055': 'METRONIX GMBH', '004027': 'SMC MASSACHUSETTS, INC.', '0040EF': 'HYPERCOM, INC.', '004093': 'PAXDATA NETWORKS LTD.', '004085': 'SAAB INSTRUMENTS AB', '004023': 'LOGIC CORPORATION', '0040A4': 'ROSE ELECTRONICS', '0040C4': 'KINKEI SYSTEM CORPORATION', '00405D': 'STAR-TEK, INC.', '0080D7': 'Fantum Engineering', '00807A': 'AITECH SYSTEMS LTD.', '0080DC': 'PICKER INTERNATIONAL', '0040E2': 'MESA RIDGE TECHNOLOGIES, INC.', '00408C': 'AXIS COMMUNICATIONS AB', '004045': 'TWINHEAD CORPORATION', '00408B': 'RAYLAN CORPORATION', '004028': 'NETCOMM LIMITED', '0040DD': 'HONG TECHNOLOGIES', '0040CB': 'LANWAN TECHNOLOGIES', '0040B2': 'SYSTEMFORSCHUNG', '0040E6': 'C.A.E.N.', '004010': 'SONIC SYSTEMS, INC.', '0040CA': "FIRST INTERNAT'L COMPUTER, INC", '0040F0': 'MicroBrain,Inc.', '004089': 'MEIDENSHA CORPORATION', '008031': 'BASYS, CORP.', '00803A': 'VARITYPER, INC.', '004015': 'ASCOM INFRASYS AG', '008056': 'SPHINX Electronics GmbH & Co KG', '00803F': 'TATUNG COMPANY', '0080E6': 'PEER NETWORKS, INC.', '0080E0': 'XTP SYSTEMS, INC.', '008088': 'VICTOR COMPANY OF JAPAN, LTD.', '0080D8': 'NETWORK PERIPHERALS INC.', '00809E': 'DATUS GMBH', '00802B': 'INTEGRATED MARKETING CO', '008095': 'BASIC MERTON HANDELSGES.M.B.H.', '0080AE': 'HUGHES NETWORK SYSTEMS', '00801C': 'NEWPORT SYSTEMS SOLUTIONS', '0080DB': 'GRAPHON CORPORATION', '008071': 'SAI TECHNOLOGY', '008098': 'TDK CORPORATION', '00801B': 'KODIAK TECHNOLOGY', '0080D3': 'SHIVA CORP.', '0080B3': 'AVAL DATA CORPORATION', '0080A1': 'MICROTEST, INC.', '008082': 'PEP MODULAR COMPUTERS GMBH', '008039': 'ALCATEL STC AUSTRALIA', '008023': 'INTEGRATED BUSINESS NETWORKS', '00806B': 'SCHMID TELECOMMUNICATION', '008059': 'STANLEY ELECTRIC CO., LTD', '008041': 'VEB KOMBINAT ROBOTRON', '008080': 'DATAMEDIA CORPORATION', '008013': 'THOMAS-CONRAD CORPORATION', '00806E': 'NIPPON STEEL CORPORATION', '008010': 'COMMODORE INTERNATIONAL', '0080D5': 'CADRE TECHNOLOGIES', '0080CA': 'NETCOM RESEARCH INCORPORATED', '0080A9': 'CLEARPOINT RESEARCH', '0080F8': 'MIZAR, INC.', '008001': 
'PERIPHONICS CORPORATION', '008097': 'CENTRALP AUTOMATISMES', '0080BF': 'TAKAOKA ELECTRIC MFG. CO. LTD.', '008017': 'PFU LIMITED', '008020': 'NETWORK PRODUCTS', '008070': 'COMPUTADORAS MICRON', '008008': 'DYNATECH COMPUTER SYSTEMS', '0000E4': 'IN2 GROUPE INTERTECHNIQUE', '000072': 'MINIWARE TECHNOLOGY', '0000A1': 'MARQUETTE ELECTRIC CO.', '0000F5': 'DIAMOND SALES LIMITED', '00005C': 'TELEMATICS INTERNATIONAL INC.', '0000AC': 'CONWARE COMPUTER CONSULTING', '008047': 'IN-NET CORP.', '008067': 'SQUARE D COMPANY', '008045': 'MATSUSHITA ELECTRIC IND. CO', '0080E7': 'Leonardo Tactical Systems.', '000094': 'ASANTE TECHNOLOGIES', '000090': 'MICROCOM', '000047': 'NICOLET INSTRUMENTS CORP.', '000070': 'HCL LIMITED', '0000B6': 'MICRO-MATIC RESEARCH', '000082': 'LECTRA SYSTEMES SA', '00002B': 'CRISP AUTOMATION, INC', '000014': 'NETRONIX', '0000A7': 'NETWORK COMPUTING DEVICES INC.', '000026': 'SHA-KEN CO., LTD.', '00008F': 'Raytheon', '000051': 'HOB ELECTRONIC GMBH & CO. KG', '0080F9': 'HEURIKON CORPORATION', '000035': 'SPECTRAGRAPHICS CORPORATION', '000021': 'SUREMAN COMP. & COMMUN. CORP.', '000074': 'RICOH COMPANY LTD.', '00003B': 'i Controls, Inc.', '000098': 'CROSSCOMM CORPORATION', '0000C6': 'EON SYSTEMS', '00008B': 'INFOTRON', '000046': 'OLIVETTI NORTH AMERICA', '00008D': 'Cryptek Inc.', '000030': 'VG LABORATORY SYSTEMS LTD', '000054': 'Schneider Electric', '000020': 'DATAINDUSTRIER DIAB AB', '00007A': 'DANA COMPUTER INC.', '000045': 'FORD AEROSPACE & COMM. CORP.', '00009C': 'ROLM MIL-SPEC COMPUTERS', '00007C': 'AMPERE INCORPORATED', '000068': 'ROSEMOUNT CONTROLS', '0000E9': 'ISICAD, INC.', '00009F': 'AMERISTAR TECHNOLOGIES INC.', '0000B3': 'CIMLINC INCORPORATED', '0000D3': 'WANG LABORATORIES INC.', '0000D0': 'DEVELCON ELECTRONICS LTD.', '000093': 'PROTEON INC.', '0000E3': 'INTEGRATED MICRO PRODUCTS LTD', '0000AD': 'BRUKER INSTRUMENTS INC.', '08008C': 'NETWORK RESEARCH CORPORATION', '080081': 'ASTECH INC.', '0000F1': 'MAGNA COMPUTER CORPORATION', '080057': 'Evans & Sutherland', '08005D': 'GOULD INC.', '08005B': 'VTA TECHNOLOGIES INC.', '080011': 'TEKTRONIX INC.', '080071': 'MATRA (DSIE)', '08006C': "SUNTEK TECHNOLOGY INT'L", '080067': 'ComDesign', '080026': 'NORSK DATA A.S.', '080025': 'CONTROL DATA', '08002D': 'LAN-TEC INC.', 'AA0004': 'DIGITAL EQUIPMENT CORPORATION', '08000C': 'MIKLYN DEVELOPMENT CO.', '00DD05': 'UNGERMANN-BASS INC.', '00DD00': 'UNGERMANN-BASS INC.', '0000AA': 'XEROX CORPORATION', '100000': 'Private', '080003': 'ADVANCED COMPUTER COMM.', '00DD03': 'UNGERMANN-BASS INC.', '00DD0F': 'UNGERMANN-BASS INC.', '0000D7': 'DARTMOUTH COLLEGE', '080017': 'NATIONAL SEMICONDUCTOR', '040AE0': 'XMIT AG COMPUTER NETWORKS', '08001D': 'ABLE COMMUNICATIONS INC.', '00DD0B': 'UNGERMANN-BASS INC.', '542160': 'Alula', '10B3C6': 'Cisco Systems, Inc', '10B3D6': 'Cisco Systems, Inc', 'F854B8': 'Amazon Technologies Inc.', '84C807': 'ADVA Optical Networking Ltd.', '000001': 'XEROX CORPORATION', '5CCD5B': 'Intel Corporate', 'A0AB51': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '589630': 'Technicolor CH USA Inc.', '781735': 'Nokia Shanghai Bell Co., Ltd.', '3C894D': 'Dr. Ing. h.c. F. 
Porsche AG', 'B43939': 'Shenzhen TINNO Mobile Technology Corp.', '749EF5': 'Samsung Electronics Co.,Ltd', '68BFC4': 'Samsung Electronics Co.,Ltd', '04B1A1': 'Samsung Electronics Co.,Ltd', 'CC464E': 'Samsung Electronics Co.,Ltd', '64C901': 'INVENTEC Corporation', '306F07': 'Nations Technologies Inc.', '5CE50C': 'Beijing Xiaomi Mobile Software Co., Ltd', 'A85E45': 'ASUSTek COMPUTER INC.', '849A40': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '18703B': 'Huawei Device Co., Ltd.', 'D89E61': 'Huawei Device Co., Ltd.', '347E00': 'Huawei Device Co., Ltd.', 'F8893C': 'Inventec Appliances Corp.', 'A0DF15': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D8D5B9': 'Rainforest Automation, Inc.', 'C4AD34': 'Routerboard.com', '5C710D': 'Cisco Systems, Inc', '7CD566': 'Amazon Technologies Inc.', '686350': 'Hella India Automotive Pvt Ltd', '98AF65': 'Intel Corporate', '80751F': 'BSkyB Ltd', '2474F7': 'GoPro', '003092': 'Kontron Electronics AG', 'F855CD': 'Visteon Corporation', '441847': 'HUNAN SCROWN ELECTRONIC INFORMATION TECH.CO.,LTD', 'E85A8B': 'Xiaomi Communications Co Ltd', '442295': 'China Mobile Iot Limited company', '00AB48': 'eero inc.', '003056': 'HMS Industrial Networks', '680AE2': 'Silicon Laboratories', 'AC8B9C': 'Primera Technology, Inc.', '2C3AFD': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '1CC1BC': 'Yichip\xa0Microelectronics (Hangzhou) Co.,Ltd', 'AC61B9': 'WAMA Technology Limited', 'C4D8F3': 'iZotope', '801609': 'Sleep Number', '848094': 'Meter, Inc.', '10B3D5': 'Cisco Systems, Inc', '30A2C2': 'Huawei Device Co., Ltd.', 'DCDFD6': 'zte corporation', 'ACA88E': 'SHARP Corporation', '98415C': 'Nintendo Co.,Ltd', 'F04F7C': 'Private', '705425': 'ARRIS Group, Inc.', '5C0BCA': 'Tunstall Nordic AB', '283334': 'Huawei Device Co., Ltd.', 'F0A225': 'Private', '50A132': 'Shenzhen MiaoMing Intelligent Technology Co.,Ltd', '807871': 'ASKEY COMPUTER CORP', '4CB1CD': 'Ruckus Wireless', 'F49C12': 'Structab AB', '88517A': 'HMD Global Oy', 'ACB3B5': 'HUAWEI TECHNOLOGIES CO.,LTD', '083A88': 'Universal Global Scientific Industrial Co., Ltd.', '08318B': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4B688': 'PLANTRONICS, INC.', '4C7A48': 'Nippon Seiki (Europe) B.V.', '84D15A': 'TCT mobile ltd', '047F0E': 'Barrot Technology Limited', 'B8F653': 'Shenzhen Jingxun Software Telecommunication Technology Co.,Ltd', '60AB14': 'LG Innotek', 'BC62D2': 'Genexis International B.V.', '6C9E7C': 'Fiberhome Telecommunication Technologies Co.,LTD', 'BCBAC2': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '44D5F2': 'IEEE Registration Authority', '0CDD24': 'Intel Corporate', '000C86': 'Cisco Systems, Inc', 'F83CBF': 'BOTATO ELECTRONICS SDN BHD', 'FC589A': 'Cisco Systems, Inc', 'F08620': 'Arcadyan Corporation', 'DCCC8D': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', 'F05C77': 'Google, Inc.', '111111': 'Private', '6CD71F': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'F06865': 'Taicang T&W Electronics', 'A463A1': 'Inventus Power Eletronica do Brasil LTDA', '3C9D56': 'HUAWEI TECHNOLOGIES CO.,LTD', '70FD45': 'HUAWEI TECHNOLOGIES CO.,LTD', '446747': 'HUAWEI TECHNOLOGIES CO.,LTD', '884A70': 'Wacom Co.,Ltd.', 'F4B5BB': 'CERAGON NETWORKS', '507AC5': 'Apple, Inc.', '4C6BE8': 'Apple, Inc.', '8C861E': 'Apple, Inc.', '542B8D': 'Apple, Inc.', '001D29': 'Doro AB', 'ECA5DE': 'ONYX WIFI Inc', '8C4962': 'Roku, Inc', '1033BF': 'Technicolor CH USA Inc.', '347563': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', '142E5E': 'Sercomm Corporation.', '0025CB': 'Reiner SCT', '44237C': 'Beijing Xiaomi Mobile Software Co., Ltd', '50EB71': 'Intel Corporate', 'C064E4': 'Cisco Systems, Inc', '50804A': 'Quectel Wireless Solutions Co., Ltd.', '309435': 'vivo Mobile Communication Co., Ltd.', 'D44BB6': 'Zhejiang Tmall Technology Co., Ltd.', '38184C': 'Sony Home Entertainment&Sound Products Inc', 'D82FE6': 'Zhejiang Tmall Technology Co., Ltd.', '140F42': 'Nokia', '006D61': 'Guangzhou V-SOLUTION Electronic Technology Co., Ltd.', 'C4AC59': 'Murata Manufacturing Co., Ltd.', '5816D7': 'ALPS ELECTRIC CO., LTD.', 'FCA47A': 'IEEE Registration Authority', 'E419C1': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B86685': 'Sagemcom Broadband SAS', '381A52': 'Seiko Epson Corporation', '000A17': 'NESTAR COMMUNICATIONS, INC', 'D8AF81': 'ZAO NPK Rotek', 'E4FDA1': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B452A9': 'Texas Instruments', '54EF44': 'Lumi United Technology Co., Ltd', '402B50': 'ARRIS Group, Inc.', '78CC2B': 'SINEWY TECHNOLOGY CO., LTD', 'B80756': 'Cisco Meraki', 'D0C857': 'IEEE Registration Authority', 'FCBCD1': 'HUAWEI TECHNOLOGIES CO.,LTD', '7460FA': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CC3ADF': 'Private', '38EFE3': ' INGENICO TERMINALS SAS', '50D4F7': 'TP-LINK TECHNOLOGIES CO.,LTD.', '5C879C': 'Intel Corporate', '24EE9A': 'Intel Corporate', '001F47': 'MCS Logic Inc.', '8CFD18': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B45459': 'China Mobile (Hangzhou) Information Technology Co., Ltd.', '000970': 'Vibration Research Corporation', '14A2A0': 'Cisco Systems, Inc', 'E4AB89': 'MitraStar Technology Corp.', '78C313': 'China Mobile Group Device Co.,Ltd.', '7434AE': 'this is engineering Inc.', '74ADB7': 'China Mobile Group Device Co.,Ltd.', '6095CE': 'IEEE Registration Authority', '8CE5C0': 'Samsung Electronics Co.,Ltd', 'F08A76': 'Samsung Electronics Co.,Ltd', 'ECAA25': 'Samsung Electronics Co.,Ltd', '687D6B': 'Samsung Electronics Co.,Ltd', '485169': 'Samsung Electronics Co.,Ltd', 'C40683': 'HUAWEI TECHNOLOGIES CO.,LTD', '94D00D': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C48A5A': 'JFCONTROL', 'B49A95': 'Shenzhen Boomtech Industrial Corporation', 'AC83E9': 'Beijing Zile Technology Co., Ltd', 'D8CA06': 'Titan DataCenters France', '1C20DB': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D0C65B': 'HUAWEI TECHNOLOGIES CO.,LTD', '9078B2': 'Xiaomi Communications Co Ltd', 'B4CFE0': 'Sichuan tianyi kanghe communications co., LTD', 'BC7FA4': 'Xiaomi Communications Co Ltd', 'FC492D': 'Amazon Technologies Inc.', '74EE2A': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '087E64': 'Technicolor CH USA Inc.', '080039': 'SPIDER SYSTEMS LIMITED', '90473C': 'China Mobile Group Device Co.,Ltd.', '889E33': 'TCT mobile ltd', '6C8AEC': 'Nantong Coship Electronics Co., Ltd.', '84C2E4': 'Jiangsu Qinheng Co., Ltd.', '7C21D8': 'Shenzhen Think Will Communication Technology co., LTD.', 'FCEA50': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '00E06B': 'W&G SPECIAL PRODUCTS', '045C6C': 'Juniper Networks', 'D8F15B': 'Espressif Inc.', 'D4F057': 'Nintendo Co.,Ltd', '6CF17E': 'Zhejiang Uniview Technologies Co.,Ltd.', '083A2F': 'Guangzhou Juan Intelligent Tech Joint Stock Co.,Ltd', '1C3A60': 'Ruckus Wireless', 'D4351D': 'Technicolor', '6009C3': 'u-blox AG', '488764': 'vivo Mobile Communication Co., Ltd.', '5C1CB9': 'vivo Mobile Communication Co., Ltd.', 'C0FD84': 'zte corporation', '444B7E': 'Fiberhome Telecommunication Technologies Co.,LTD', 'DC8C37': 'Cisco Systems, Inc', 'E8D0FC': 'Liteon Technology Corporation', 'E8E8B7': 'Murata Manufacturing Co., Ltd.', '103D3E': 'China Mobile Group Device Co.,Ltd.', '7C50DA': 'Private', '64CC22': 'Arcadyan Corporation', '4C9157': 'Fujian LANDI Commercial Equipment Co.,Ltd', '9C25BE': 'Wildlife Acoustics, Inc.', 'D039EA': 'NetApp', 'F8DFE1': 'MyLight Systems', '60D2DD': 'Shenzhen Baitong Putian Technology Co.,Ltd.', '788C77': 'LEXMARK INTERNATIONAL, INC.', '3C0C7D': 'Tiny Mesh AS', '3476C5': 'I-O DATA DEVICE,INC.', '24DA33': 'HUAWEI TECHNOLOGIES CO.,LTD', 'FCAB90': 'HUAWEI TECHNOLOGIES CO.,LTD', '5893D8': 'Texas Instruments', '5051A9': 'Texas Instruments', '988B0A': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', 'A4975C': 'VTech Telecommunications Ltd.', 'B02A1F': ' Wingtech Group (HongKong)Limited', 'DC680C': 'Hewlett Packard Enterprise', 'F40270': 'Dell Inc.', '1C2704': 'zte corporation', '5078B3': 'zte corporation', 'F0D4F7': 'varram system', 'E0CC7A': 'HUAWEI TECHNOLOGIES CO.,LTD', '6C23CB': 'Wattty Corporation', '60AB67': 'Xiaomi Communications Co Ltd', 'AC710C': 'China Mobile Group Device Co.,Ltd.', 'A8DB03': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '308944': 'DEVA Broadcast Ltd.', 'F47960': 'HUAWEI TECHNOLOGIES CO.,LTD', '145290': 'KNS Group LLC (YADRO Company)', '5C32C5': 'Teracom Ltd.', 'ACEE70': 'Fontem Ventures BV', 'ACE2D3': 'Hewlett Packard', '00FD22': 'Cisco Systems, Inc', '4418FD': 'Apple, Inc.', '00B600': 'VOIM Co., Ltd.', '98FA9B': 'LCFC(HeFei) Electronics Technology co., ltd', '005B94': 'Apple, Inc.', 'E0897E': 'Apple, Inc.', 'B00CD1': 'Hewlett Packard', '4846C1': 'FN-LINK TECHNOLOGY LIMITED', 'B4D0A9': 'China Mobile Group Device Co.,Ltd.', 'FC29F3': 'McPay Co.,LTD.', 'F8AFDB': 'Fiberhome Telecommunication Technologies Co.,LTD', '4889E7': 'Intel Corporate', 'A0BD1D': 'Zhejiang Dahua Technology Co., Ltd.', 'E49F1E': 'ARRIS Group, Inc.', '002615': 'Teracom Limited', '9C8EDC': 'Teracom Limited', '000191': 'SYRED Data Systems', 'ACD564': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '94D075': 'CIS Crypto', '28B4FB': 'Sprocomm Technologies CO.,LTD.', '40F9D5': 'Tecore Networks', 'CC2C83': 'DarkMatter L.L.C', 'DCED84': 'Haverford Systems Inc', '644C36': 'Intel Corporate', '7C573C': 'Aruba, a Hewlett Packard Enterprise Company', '2C01B5': 'Cisco Systems, Inc', '28EF01': 'Private', 'C05336': 'Beijing National Railway Research & Design Institute of Signal & Communication Group Co..Ltd.', '606ED0': 'SEAL AG', '2CCCE6': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', 'E44CC7': 'IEEE Registration Authority', 'D4E880': 'Cisco Systems, Inc', 'A8346A': 'Samsung Electronics Co.,Ltd', '3C20F6': 'Samsung Electronics Co.,Ltd', '7C38AD': 'Samsung Electronics Co.,Ltd', 'D49CDD': 'AMPAK Technology,Inc.', '50F722': 'Cisco Systems, Inc', '9849E1': 'Boeing Defence Australia', '04D7A5': 'New H3C Technologies Co., Ltd', '4C4D66': 'Nanjing Jiahao Technology Co., Ltd.', 'A4817A': 'CIG SHANGHAI CO LTD', '905851': 'Technicolor CH USA Inc.', '9809CF': 'OnePlus Technology (Shenzhen) Co., Ltd', 'B8DE5E': 
'LONGCHEER TELECOMMUNICATION LIMITED', '885A06': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '5447D3': 'TSAT AS', 'CCEDDC': 'MitraStar Technology Corp.', '3CF011': 'Intel Corporate', 'CCD81F': 'Maipu Communication Technology Co.,Ltd.', '688B0F': 'China Mobile IOT Company Limited', 'F82F6A': 'ITEL MOBILE LIMITED', 'B068E6': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', 'A4E7E4': 'Connex GmbH', 'B8EF8B': 'SHENZHEN CANNICE TECHNOLOGY CO.,LTD', 'B8186F': 'ORIENTAL MOTOR CO., LTD.', '001A3F': 'Intelbras', 'C0D834': 'xvtec ltd', 'B8C253': 'Juniper Networks', 'F05C19': 'Aruba, a Hewlett Packard Enterprise Company', '04BD88': 'Aruba, a Hewlett Packard Enterprise Company', '9C1C12': 'Aruba, a Hewlett Packard Enterprise Company', '18DFB4': 'BOSUNG POWERTEC CO.,LTD.', '000147': 'Zhone Technologies', '20B780': 'Toshiba Visual Solutions Corporation Co.,Ltd', 'E03717': 'Technicolor CH USA Inc.', '14D4FE': 'ARRIS Group, Inc.', '304F75': 'DASAN Network Solutions', 'ECA9FA': 'GUANGDONG GENIUS TECHNOLOGY CO., LTD.', '0003A5': 'Medea Corporation', 'BCE67C': 'Cambium Networks Limited', '7C1E06': 'New H3C Technologies Co., Ltd', 'F0B31E': 'Universal Electronics, Inc.', 'F89173': 'AEDLE SAS', 'C84F86': 'Sophos Ltd', '6429ED': 'AO PKK Milandr', '443C88': 'FICOSA MAROC INTERNATIONAL', '841C70': 'zte corporation', '544741': 'XCHENG HOLDING', 'CCF735': 'Amazon Technologies Inc.', 'C4F839': 'Actia Automotive', 'C8F742': 'HangZhou Gubei Electronics Technology Co.,Ltd', '006FF2': 'MITSUMI ELECTRIC CO.,LTD.', '30DF8D': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'D4C93C': 'Cisco Systems, Inc', '78DD12': 'Arcadyan Corporation', '2C5D34': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '9C1463': 'Zhejiang Dahua Technology Co., Ltd.', '002433': 'ALPS ELECTRIC CO., LTD.', '002306': 'ALPS ELECTRIC CO., LTD.', 'B4EC02': 'ALPS ELECTRIC CO., LTD.', '646038': 'Hirschmann Automation and Control GmbH', '7018A7': 'Cisco Systems, Inc', 'CCD39D': 'IEEE Registration Authority', 'E0750A': 'ALPS ELECTRIC CO., LTD.', '0019C1': 'ALPS ELECTRIC CO., LTD.', '0016FE': 'ALPS ELECTRIC CO., LTD.', '9C8D7C': 'ALPS ELECTRIC CO., LTD.', 'D425CC': 'IEEE Registration Authority', '8C6DC4': 'Megapixel VR', 'BC7536': 'ALPS ELECTRIC CO., LTD.', 'E0AE5E': 'ALPS ELECTRIC CO., LTD.', 'D4B761': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', '7C035E': 'Xiaomi Communications Co Ltd', '44FE3B': 'Arcadyan Corporation', 'D83AF5': 'Wideband Labs LLC', '38D9A5': 'Mikotek Information Inc. 
', '4C875D': 'Bose Corporation', '982CBC': 'Intel Corporate', 'B0E7DE': 'Homa Technologies JSC', '649D99': 'FS COM INC', '00169D': 'Cisco Systems, Inc', '4C962D': 'Fresh AB', '00D279': 'VINGROUP JOINT STOCK COMPANY', '484A30': 'George Robotics Limited', '4861A3': 'Concern Axion JSC', '304A26': 'Shenzhen Trolink Technology CO, LTD', '4CE5AE': 'Tianjin Beebox Intelligent Technology Co.,Ltd.', 'E4D3AA': 'FUJITSU CONNECTED TECHNOLOGIES LIMITED', 'D467D3': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'A41232': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '48E3C3': 'JENOPTIK Advanced Systems GmbH', 'CC355A': 'SecuGen Corporation', '80546A': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'B447F5': 'Earda Technologies co Ltd', 'F4C7C8': 'Kelvin Inc.', 'A89CA4': 'Furrion Limited', '00004C': 'NEC Corporation', '8CCF8F': 'ITC Systems', '2875D8': 'FUJIAN STAR-NET COMMUNICATION CO.,LTD', '90E202': 'Texas Instruments', '302478': 'Sagemcom Broadband SAS', '7C89C1': 'Palo Alto Networks', '84A93E': 'Hewlett Packard', 'B0C387': 'GOEFER, Inc.', '086BD7': 'Silicon Laboratories', '349342': 'TTE Corporation', '589EC6': 'Gigaset Communications GmbH', '64C753': 'Apple, Inc.', '6458AD': 'China Mobile IOT Company Limited', '1CF29A': 'Google, Inc.', '748A0D': 'ARRIS Group, Inc.', 'CC75E2': 'ARRIS Group, Inc.', 'A0A3B8': 'WISCLOUD', '38F9D3': 'Apple, Inc.', 'FC183C': 'Apple, Inc.', 'A40C66': 'Shenzhen Colorful Yugong Technology and Development Co., Ltd.', '4455B1': 'HUAWEI TECHNOLOGIES CO.,LTD', '98F9C7': 'IEEE Registration Authority', 'FC7774': 'Intel Corporate', '700B4F': 'Cisco Systems, Inc', 'E4388C': 'Digital Products Limited', '184BDF': 'Caavo Inc', 'B89A9A': 'Xin Shi Jia Technology (Beijing) Co.,Ltd', '8C7BF0': 'Xufeng Development Limited', 'E0A509': 'Bitmain Technologies Inc', '3C5CC4': 'Amazon Technologies Inc.', 'D8A756': 'Sagemcom Broadband SAS', 'D8D6F3': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '6C2CDC': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '7835A0': 'Zurn Industries LLC', 'F43909': 'Hewlett Packard', '201F31': 'Inteno Broadband Technology AB', '2CCC44': 'Sony Interactive Entertainment Inc.', 'F47DEF': 'Samsung Electronics Co.,Ltd', '7C8BB5': 'Samsung Electronics Co.,Ltd', '54833A': 'Zyxel Communications Corporation', '98ED5C': 'Tesla Motors, Inc', '787052': 'Welotec GmbH', 'D8A98B': 'Texas Instruments', '00116C': 'Nanwang Multimedia Inc.,Ltd', '10B9F7': 'Niko-Servodan', '14EFCF': 'SCHREDER', '3830F9': 'LG Electronics (Mobile Communications)', 'A83FA1': 'IEEE Registration Authority', '7847E3': 'SICHUAN TIANYI COMHEART TELECOM CO.,LTD', '6C9BC0': 'Chemoptics Inc.', 'F4DBE6': 'Cisco Systems, Inc', '248498': 'Beijing Jiaoda Microunion Tech.Co.,Ltd.', 'C074AD': 'Grandstream Networks, Inc.', 'F095F1': 'Carl Zeiss AG', '00F48D': 'Liteon Technology Corporation', '702ED9': 'Guangzhou Shiyuan Electronics Co., Ltd.', '70192F': 'HUAWEI TECHNOLOGIES CO.,LTD', '10C22F': 'China Entropy Co., Ltd.', 'BC3865': 'JWCNETWORKS', '04EB40': 'Cisco Systems, Inc', '18A7F1': 'Qingdao Haier Technology Co.,Ltd', '90E17B': 'Apple, Inc.', 'D81C79': 'Apple, Inc.', '58E6BA': 'Apple, Inc.', '44E4EE': 'Wistron Neweb Corporation', 'DC41E5': 'Shenzhen Zhixin Data Service Co., Ltd.', '00A5BF': 'Cisco Systems, Inc', 'C8BAE9': 'QDIS', '1801F1': 'Xiaomi Communications Co Ltd', 'C44F33': 'Espressif Inc.', '546AD8': 'Elster Water Metering', 'C0847D': 'AMPAK Technology, Inc.', '0409A5': 'HFR, Inc.', '94917F': 'ASKEY COMPUTER CORP', '9C0CDF': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '242124': 'Nokia', '949B2C': 'Extreme Networks, Inc.', '7CD30A': 'INVENTEC CORPORATION', '001E33': 'INVENTEC CORPORATION', 'FC1D84': 'Autobase', '18AC9E': 'ITEL MOBILE LIMITED', 'EC84B4': 'CIG SHANGHAI CO LTD', '00D096': '3COM EUROPE LTD', '002654': '3COM', '0050DA': '3COM', '000476': '3COM', '000475': '3COM', '4422F1': 'S.FAC, INC', '3009F9': 'IEEE Registration Authority', 'B4DDD0': 'Continental Automotive Hungary Kft', '48F027': 'Chengdu newifi Co.,Ltd', '14C697': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '7C03AB': 'Xiaomi Communications Co Ltd', 'DC16B2': 'HUAWEI TECHNOLOGIES CO.,LTD', '24FB65': 'HUAWEI TECHNOLOGIES CO.,LTD', '0CB527': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B42E99': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '342CC4': 'Compal Broadband Networks, Inc.', '14E9B2': 'Fiberhome Telecommunication Technologies Co.,LTD', 'C8544B': 'Zyxel Communications Corporation', 'D07FA0': 'Samsung Electronics Co.,Ltd', '009093': 'EIZO Corporation', '4C1159': 'Vision Information & Communications', '00049F': 'Freescale Semiconductor', '000CE6': 'Meru Networks Inc', '00D07B': 'COMCAM INTERNATIONAL INC', '78524A': 'Ensenso GmbH', 'E4FC82': 'Juniper Networks', '00B5D0': 'Samsung Electronics Co.,Ltd', '1496E5': 'Samsung Electronics Co.,Ltd', 'C46E7B': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', 'C048FB': 'Shenzhen JingHanDa Electronics Co.Ltd', '20E882': 'zte corporation', 'F09FFC': 'SHARP Corporation', '0CB5DE': 'Alcatel Lucent', '000B3B': 'devolo AG', '240588': 'Google, Inc.', '50DCFC': 'ECOCOM', '700B01': 'Sagemcom Broadband SAS', '5C2623': 'WaveLynx Technologies Corporation', '303855': 'Nokia Corporation', '00B670': 'Cisco Systems, Inc', 'AC6417': 'Siemens AG', '347916': 'HUAWEI TECHNOLOGIES CO.,LTD', '3466EA': 'VERTU INTERNATIONAL CORPORATION LIMITED', '28385C': 'FLEXTRONICS', '0C1C57': 'Texas Instruments', '806FB0': 'Texas Instruments', '883F99': 'Siemens AG', 'EC6F0B': 'FADU, Inc.', '0006EC': 'Harris Corporation', 
'00BB60': 'Intel Corporate', '7C6DA6': 'Superwave Group LLC', 'D016B4': 'HUAWEI TECHNOLOGIES CO.,LTD', '20A8B9': 'SIEMENS AG', 'F0F08F': 'Nextek Solutions Pte Ltd', '8CB0E9': 'Samsung Electronics.,LTD', '1C3947': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '342792': 'FREEBOX SAS', '40A108': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '705AB6': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '201A06': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'F8A963': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'DC0EA1': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'B870F4': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '009D6B': 'Murata Manufacturing Co., Ltd.', '745933': 'Danal Entertainment', 'EC58EA': 'Ruckus Wireless', '7C9A54': 'Technicolor CH USA Inc.', '388B59': 'Google, Inc.', '880118': 'BLT Co', 'A42618': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '34E12D': 'Intel Corporate', 'A46191': 'NamJunSa', '84A24D': 'Birds Eye Systems Private Limited', '7C6B9C': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '0017B6': 'Aquantia Corporation', '105917': 'Tonal', 'D0EFC1': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D45251': 'IBT Ingenieurbureau Broennimann Thun', '0018B5': 'Magna Carta', 'D87EB1': 'x.o.ware, inc.', '485702': 'HUAWEI TECHNOLOGIES CO.,LTD', '644F42': 'JETTER CO., Ltd.', 'DCAF68': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '24EC51': 'ADF Technologies Sdn Bhd', '7089CC': 'China Mobile Group Device Co.,Ltd.', '2C5BE1': 'Centripetal Networks, Inc', 'DCEFCA': 'Murata Manufacturing Co., Ltd.', '00BC60': 'Cisco Systems, Inc', 'CC7B61': 'NIKKISO CO., LTD.', '9C713A': 'HUAWEI TECHNOLOGIES CO.,LTD', '2C97B1': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A89969': 'Dell Inc.', 'A4EA8E': 'Extreme Networks, Inc.', '882D53': 'Baidu Online Network Technology (Beijing) Co., Ltd.', '00D0B5': 'IPricot formerly DotCom', '746BAB': 'GUANGDONG ENOK COMMUNICATION CO., LTD', '0CB6D2': 'D-Link International', '7829ED': 'ASKEY COMPUTER CORP', '5061BF': 'Cisco Systems, Inc', '0009DF': 'Vestel Elektronik San ve Tic. A.Ş.', 'F4032F': 'Reduxio Systems', '944A0C': 'Sercomm Corporation.', '000FA2': '2xWireless', '108EE0': 'Samsung Electronics Co.,Ltd', 'FCA621': 'Samsung Electronics Co.,Ltd', '8CF228': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '700F6A': 'Cisco Systems, Inc', '000BB2': 'SMALLBIG TECHNOLOGY', '00305E': 'Abelko Innovation', 'FC6BF0': 'TOPWELL INTERNATIONAL HOLDINDS LIMITED', '001477': 'Trilliant', '00079B': 'Aurora Networks', '544810': 'Dell Inc.', '54B203': 'PEGATRON CORPORATION', '3868DD': 'INVENTEC CORPORATION', '3C6AA7': 'Intel Corporate', 'B8B7F1': 'Wistron Neweb Corporation', '8050F6': 'ITEL MOBILE LIMITED', 'A8CAB9': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '203956': 'HMD Global Oy', '78AFE4': 'Comau S.p.A', '90A137': 'Beijing Splendidtel Communication Technology Co,. 
Ltd', '80029C': 'Gemtek Technology Co., Ltd.', 'D0C5D3': 'AzureWave Technology Inc.', '14169E': ' Wingtech Group (HongKong)Limited', 'F8C39E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E8D099': 'Fiberhome Telecommunication Technologies Co.,LTD', '107BA4': 'Olive & Dove Co.,Ltd.', '7C41A2': 'Nokia', 'BC325F': 'Zhejiang Dahua Technology Co., Ltd.', '505BC2': 'Liteon Technology Corporation', '6C21A2': 'AMPAK Technology, Inc.', '9C2F73': 'Universal Tiancheng Technology (Beijing) Co., Ltd.', 'D832E3': 'Xiaomi Communications Co Ltd', '9487E0': 'Xiaomi Communications Co Ltd', '38AF29': 'Zhejiang Dahua Technology Co., Ltd.', 'C88629': 'Shenzhen Duubee Intelligent Technologies Co.,LTD.', 'CCC2E0': 'Raisecom Technology CO., LTD', '300AC5': 'Ruio telecommunication technologies Co., Limited', '00E065': 'OPTICAL ACCESS INTERNATIONAL', '4466FC': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'A028ED': 'HMD Global Oy', 'AC5474': 'China Mobile IOT Company Limited', '8C1CDA': 'IEEE Registration Authority', '0007A8': 'Haier Group Technologies Ltd', '9814D2': 'Avonic', '1409DC': 'HUAWEI TECHNOLOGIES CO.,LTD', 'EC9365': 'Mapper.ai, Inc.', '38BAF8': 'Intel Corporate', 'C4BAA3': 'Beijing Winicssec Technologies Co., Ltd.', '9CFEA1': 'Fiberhome Telecommunication Technologies Co.,LTD', 'D88A3B': 'UNIT-EM', 'EC5A86': 'Yulong Computer Telecommunication Scientific (Shenzhen) Co.,Ltd', 'C0EEB5': 'Enice Network.', '60DEF3': 'HUAWEI TECHNOLOGIES CO.,LTD', '50A009': 'Xiaomi Communications Co Ltd', '88964E': 'ARRIS Group, Inc.', '883F4A': 'Texas Instruments', '9CA615': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'E44E76': 'CHAMPIONTECH ENTERPRISE (SHENZHEN) INC', '004098': 'DRESSLER GMBH & CO.', '001DFA': 'Fujian LANDI Commercial Equipment Co.,Ltd', '9CE65E': 'Apple, Inc.', 'C49880': 'Apple, Inc.', 'E0338E': 'Apple, Inc.', '08F69C': 'Apple, Inc.', '04FA83': 'Qingdao Haier Technology Co.,Ltd', '78F9B4': 'Nokia', '2016B9': 'Intel Corporate', 'D076E7': 'TP-LINK TECHNOLOGIES CO.,LTD.', '50A67F': 'Apple, Inc.', 'D461DA': 'Apple, Inc.', 'F01898': 'Apple, Inc.', '881908': 'Apple, Inc.', '5C0947': 'Apple, Inc.', '14205E': 'Apple, Inc.', 'B841A4': 'Apple, Inc.', '00165C': 'Trackflow Ltd.', '641CAE': 'Samsung Electronics Co.,Ltd', 'F8E44E': 'MCOT INC.', '40CD7A': 'Qingdao Hisense Communications Co.,Ltd.', 'DC4EF4': 'Shenzhen MTN Electronics CO., Ltd', 'F08173': 'Amazon Technologies Inc.', 'EC65CC': 'Panasonic Automotive Systems Company of America', '949990': 'VTC Telecommunications', 'F4BC97': 'Shenzhen Crave Communication Co., LTD', '28FEDE': 'COMESTA, Inc.', '907910': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '003DE8': 'LG Electronics (Mobile Communications)', '68FEDA': 'Fiberhome Telecommunication Technologies Co.,LTD', 'E8986D': 'Palo Alto Networks', '144E34': 'Remote Solution', '00508B': 'Hewlett Packard', '146B9C': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '948DEF': 'Oetiker Schweiz AG', '2CD974': 'Hui Zhou Gaoshengda Technology Co.,LTD', 'D4F786': 'Fiberhome Telecommunication Technologies Co.,LTD', '3403DE': 'Texas Instruments', '94B86D': 'Intel Corporate', '240A63': 'ARRIS Group, Inc.', 'F88B37': 'ARRIS Group, Inc.', '20677C': 'Hewlett Packard Enterprise', '34D712': 'Smartisan Digital Co., Ltd', 'A06610': 'FUJITSU LIMITED', '44FFBA': 'zte corporation', 'E0E62E': 'TCT mobile ltd', '387862': 'Sony Mobile Communications Inc', 'E42D7B': 'China Mobile IOT Company Limited', 'C464E3': 'Texas Instruments', '8817A3': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '88A9A7': 'IEEE Registration Authority', 'EC8914': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B89436': 'HUAWEI TECHNOLOGIES CO.,LTD', '501479': 'iRobot Corporation ', '6084BD': 'BUFFALO.INC', '347ECA': 'NEXTWILL', 'B42EF8': 'Eline Technology co.Ltd', 'A4D4B2': 'Shenzhen MeiG Smart Technology Co.,Ltd', '8CF773': 'Nokia', 'DCDD24': 'Energica Motor Company SpA', '641CB0': 'Samsung Electronics Co.,Ltd', '903A72': 'Ruckus Wireless', 'CC3B58': 'Curiouser Products Inc', '4CEFC0': 'Amazon Technologies Inc.', '8C5973': 'Zyxel Communications Corporation', '24181D': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '58D759': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F89066': 'Nain Inc.', '7006AC': 'Eastcompeace Technology Co., Ltd', '2802D8': 'Samsung Electronics Co.,Ltd', 'DCE533': 'IEEE Registration Authority', 'D8445C': 'DEV Tecnologia Ind Com Man Eq LTDA', '509551': 'ARRIS Group, Inc.', '804126': 'HUAWEI TECHNOLOGIES CO.,LTD', 'ACF970': 'HUAWEI TECHNOLOGIES CO.,LTD', '7C3953': 'zte corporation', '38E1AA': 'zte corporation', '48C796': 'Samsung Electronics Co.,Ltd', 'F4C248': 'Samsung Electronics Co.,Ltd', 'F47190': 'Samsung Electronics Co.,Ltd', 'C4FFBC': 'IEEE Registration Authority', '0C2369': 'Honeywell SPS', '04C9D9': 'Dish Technologies Corp', '7055F8': 'Cerebras Systems Inc', '6C54CD': 'LAMPEX ELECTRONICS LIMITED', '000889': 'Dish Technologies Corp', 'F0B5B7': 'Disruptive Technologies Research AS', 'B4DEDF': 'zte corporation', '283B82': 'D-Link International', 'D4909C': 'Apple, Inc.', 'E4E0A6': 'Apple, Inc.', '580454': 'ICOMM HK LIMITED', '3C9A77': 'Technicolor CH USA Inc.', 'C477AF': 'Advanced Digital Broadcast SA', 'A486AE': 'Quectel Wireless Solutions', '94290C': 'Shenyang wisdom Foundation Technology Development Co., Ltd.', '9C32CE': 'CANON INC.', '20E09C': 'Nokia', '2CFDA1': 'ASUSTek COMPUTER INC.', '3807D4': 'Zeppelin Systems GmbH', '04197F': 'Grasphere Japan', '5C0038': 'Viasat Group S.p.A.', '8CEC4B': 'Dell Inc.', '34415D': 'Intel Corporate', '005091': 'NETACCESS, INC.', 'B85001': 'Extreme Networks, Inc.', '802BF9': 'Hon Hai Precision Ind. Co.,Ltd.', '54B802': 'Samsung Electronics Co.,Ltd', '10CEA9': 'Texas Instruments', '805E0C': 'YEALINK(XIAMEN) NETWORK TECHNOLOGY CO.,LTD.', '6C49C1': 'o2ones Co., Ltd.', '70EEA3': 'Eoptolink Technology Inc. 
Ltd,', '7047E9': 'vivo Mobile Communication Co., Ltd.', '5C521E': 'Nintendo Co.,Ltd', '14444A': 'Apollo Seiko Ltd.', '3C2C99': 'Edgecore Networks Corporation', '88D039': 'TCL Technoly Electronics(Huizhou).,Ltd', '683E02': 'SIEMENS AG, Digital Factory, Motion Control System', '000261': 'Tilgin AB', '0014C3': 'Seagate Technology', '0004CF': 'Seagate Technology', '002037': 'Seagate Technology', '5C81A7': 'Network Devices Pty Ltd', '5C0C0E': 'Guizhou Huaxintong Semiconductor Technology Co Ltd', '503CEA': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D096FB': 'DASAN Network Solutions', '00E091': 'LG Electronics', '38437D': 'Compal Broadband Networks, Inc.', '506F98': 'Sehaj Synergy Technologies Private Limited', '4CAE1C': 'SaiNXT Technologies LLP', '142882': 'MIDICOM ELECTRONICS CO.LTD', 'EC8193': 'Logitech, Inc', '6CDD30': 'Cisco Systems, Inc', '6C4E86': 'Third Millennium Systems Ltd.', '5C86C1': 'DONGGUAN SOLUM ELECTRONICS CO.,LTD', '5C7776': 'TCT mobile ltd', '70E56E': 'Texas Instruments', '547DCD': 'Texas Instruments', '00AECD': 'Pensando Systems', 'FC9DD8': 'Beijing TongTongYiLian Science and Technology Ltd.', 'DC2834': 'HAKKO Corporation', '84509A': 'Easy Soft TV Co., Ltd', '001730': 'Automation Electronics', '30E48E': 'Vodafone UK', '449160': 'Murata Manufacturing Co., Ltd.', 'B4F1DA': 'LG Electronics (Mobile Communications)', 'C863F1': 'Sony Interactive Entertainment Inc.', 'DCE1AD': 'Shenzhen Wintop Photoelectric Technology Co., Ltd', '948854': 'Texas Instruments', '001D0D': 'Sony Interactive Entertainment Inc.', 'B0FC36': 'CyberTAN Technology Inc.', '001DF4': 'Magellan Technology Pty Limited', '6C05D5': 'Ethertronics Inc', '348584': 'Aerohive Networks Inc.', '0019C2': 'Equustek Solutions, Inc.', '80000B': 'Intel Corporate', 'ECB0E1': 'Ciena Corporation', '78DDD9': 'Guangzhou Shiyuan Electronics Co., Ltd. ', 'F8B7E2': 'Cisco Systems, Inc', 'F82055': 'Green Information System', '74E19A': 'Fiberhome Telecommunication Technologies Co.,LTD', '000097': 'Dell EMC', '8CCF09': 'Dell EMC', '8C839D': 'SHENZHEN XINYUPENG ELECTRONIC TECHNOLOGY CO., LTD', 'B0C19E': 'zte corporation', '0C3747': 'zte corporation', 'ACA667': 'Electronic Systems Protection, Inc.', '0081F9': 'Texas Instruments', 'ECB5FA': 'Philips Lighting BV', '44CD0E': 'FLEXTRONICS MANUFACTURING(ZHUHAI)CO.,LTD.', 'E8825B': 'ARRIS Group, Inc.', '70991C': 'Shenzhen Honesty Electronics Co.,Ltd', '80B03D': 'Apple, Inc.', 'E49ADC': 'Apple, Inc.', 'ACE4B5': 'Apple, Inc.', 'D0D2B0': 'Apple, Inc.', '001CAE': 'WiChorus, Inc.', '7CDD76': 'Suzhou Hanming Technologies Co., Ltd.', '246880': 'Braveridge.co.,ltd.', 'F092B4': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', 'E8DF70': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '28AD3E': 'Shenzhen TONG BO WEI Technology CO.,LTD', '001C56': 'Pado Systems, Inc.', 'F06D78': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '7844FD': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'ECF8EB': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '707D95': 'Shenzhen City LinwlanTechnology Co. Ltd.', '2C431A': 'Shenzhen YOUHUA Technology Co., Ltd', 'A8D3C8': 'Topcon Electronics GmbH & Co. 
KG', 'D05995': 'Fiberhome Telecommunication Technologies Co.,LTD', '18CC88': 'Hitachi Johnson Controls Air ', '80C755': 'Panasonic Appliances Company', 'F0BD2E': 'H+S Polatis Ltd', '746EE4': 'Asia Vital Components Co.,Ltd.', '0040E4': 'E-M TECHNOLOGY, INC.', '984B4A': 'ARRIS Group, Inc.', 'E084F3': 'High Grade Controls Corporation', '38A6CE': 'BSkyB Ltd', '3456FE': 'Cisco Meraki', '70708B': 'Cisco Systems, Inc', '389F5A': 'C-Kur TV Inc.', 'D843ED': 'Suzuken', 'BC4101': 'Shenzhen TINNO Mobile Technology Corp.', '043A0D': 'SM Optics S.r.l.', '448F17': 'Samsung Electronics Co., Ltd. ARTIK', '00FC8B': 'Amazon Technologies Inc.', '0076B1': 'Somfy-Protect By Myfox SAS', '6CC147': 'Xiamen Hanin Electronic Technology Co., Ltd', 'A072E4': 'NJ SYSTEM CO.,LTD', '4C1365': 'Emplus Technologies', 'CCF957': 'u-blox AG', '0C62A6': 'Hui Zhou Gaoshengda Technology Co.,LTD', '18132D': 'zte corporation', '74D21D': 'HUAWEI TECHNOLOGIES CO.,LTD', '1878D4': 'Verizon ', 'B8D94D': 'Sagemcom Broadband SAS', '3890A5': 'Cisco Systems, Inc', 'C0742B': 'SHENZHEN XUNLONG SOFTWARE CO.,LIMITED', '5C6776': 'IDS Imaging Development Systems GmbH', '44EAD8': 'Texas Instruments', '189BA5': 'IEEE Registration Authority', 'A491B1': 'Technicolor', '1C7022': 'Murata Manufacturing Co., Ltd.', 'CC9891': 'Cisco Systems, Inc', '28BF89': 'Fiberhome Telecommunication Technologies Co.,LTD', '903DBD': 'SECURE METERS LIMITED', '002294': 'KYOCERA CORPORATION ', '3889DC': 'Opticon Sensors Europe B.V.', '8C4500': 'Murata Manufacturing Co., Ltd.', '1CDDEA': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '940006': 'jinyoung', '74E5F9': 'Intel Corporate', '20040F': 'Dell Inc.', 'A43412': 'Thales Alenia Space', '8C8590': 'Apple, Inc.', 'BC88C3': 'Ningbo Dooya Mechanic & Electronic Technology Co., Ltd', '38CD07': 'Beijing FaceCam Technology Co., Ltd.', '00D060': 'Panasonic Europe Ltd.', 'ECFA03': 'FCA', '6C96CF': 'Apple, Inc.', '681F40': 'Blu Wireless Technology Ltd', '90ADF7': 'vivo Mobile Communication Co., Ltd.', '40CE24': 'Cisco Systems, Inc', '3432E6': 'Panasonic Industrial Devices Europe GmbH', '40017A': 'Cisco Systems, Inc', '78886D': 'Apple, Inc.', '20EE28': 'Apple, Inc.', 'B4F61C': 'Apple, Inc.', '08F4AB': 'Apple, Inc.', 'FC017C': 'Hon Hai Precision Ind. Co.,Ltd.', '90324B': 'Hon Hai Precision Ind. Co.,Ltd.', '602E20': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E472E2': 'HUAWEI TECHNOLOGIES CO.,LTD', '00127D': 'MobileAria', 'F86465': 'Anova Applied Electronics, Inc.', '002060': 'ALCATEL ITALIA S.p.A.', 'A08869': 'Intel Corporate', '508F4C': 'Xiaomi Communications Co Ltd', 'A47758': 'Ningbo Freewings Technologies Co.,Ltd', '58A0CB': 'TrackNet, Inc', '000CEC': 'Spectracom Corp.', 'E06089': 'Cloudleaf, Inc.', '783690': 'Yulong Computer Telecommunication Scientific (Shenzhen) Co.,Ltd', 'BC54FC': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '0C4B54': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'E4F004': 'Dell Inc.', '94E36D': 'Texas Instruments', 'F0F8F2': 'Texas Instruments', '341513': 'Texas Instruments', '74819A': 'PT. Hartono Istana Teknologi', '283545': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', '044F4C': 'HUAWEI TECHNOLOGIES CO.,LTD', '18B81F': 'ARRIS Group, Inc.', '1C151F': 'HUAWEI TECHNOLOGIES CO.,LTD', '008BFC': 'mixi,Inc.', 'A82BB5': 'Edgecore Networks Corporation', '60F677': 'Intel Corporate', 'E8E1E2': 'Energotest', '7811DC': 'XIAOMI Electronics,CO.,LTD', 'D463C6': 'Motorola Mobility LLC, a Lenovo Company', 'F844E3': 'Taicang T&W Electronics', '24A534': 'SynTrust Tech International Ltd. 
', 'C444A0': 'Cisco Systems, Inc', '18742E': 'Amazon Technologies Inc.', '90A365': 'HMD Global Oy', 'DC44B6': 'Samsung Electronics Co.,Ltd', '1007B6': 'Samsung Electronics Co.,Ltd', '342D0D': 'Samsung Electronics Co.,Ltd', 'A44CC8': 'Dell Inc.', 'D837BE': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'D4684D': 'Ruckus Wireless', '8C0C90': 'Ruckus Wireless', '6CAAB3': 'Ruckus Wireless', '001392': 'Ruckus Wireless', '085114': 'QINGDAO TOPSCOMM COMMUNICATION CO., LTD', 'D05A00': 'Technicolor CH USA Inc.', '70788B': 'vivo Mobile Communication Co., Ltd.', '4859A4': 'zte corporation', '54BD79': 'Samsung Electronics Co.,Ltd', 'A0423F': 'Tyan Computer Corp', '70F11C': 'Shenzhen Ogemray Technology Co.,Ltd', '7065A3': 'Kandao lightforge Co., Ltd.', '14144B': 'Ruijie Networks Co.,LTD', '74D0DC': 'Ericsson AB', 'C08ADE': 'Ruckus Wireless', '001D2E': 'Ruckus Wireless', 'B4E62A': 'LG Innotek', 'A0C5F2': 'IEEE Registration Authority', 'A86B7C': 'SHENZHEN FENGLIAN TECHNOLOGY CO., LTD.', 'B03956': 'NETGEAR', '3C0CDB': 'UNIONMAN TECHNOLOGY CO.,LTD', 'EC42B4': 'ADC Corporation', '60DA83': 'Hangzhou H3C Technologies Co., Limited', '2C5731': ' Wingtech Group (HongKong)Limited', 'CC4639': 'WAAV, Inc.', 'AC9E17': 'ASUSTek COMPUTER INC.', '641666': 'Nest Labs Inc.', 'D8DF7A': 'Quest Software, Inc.', 'E4A749': 'Palo Alto Networks', '145BE1': 'nyantec GmbH', 'A0239F': 'Cisco Systems, Inc', '70F35A': 'Cisco Systems, Inc', 'A0341B': 'Adero Inc', 'A0AFBD': 'Intel Corporate', '7C8BCA': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'B04E26': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'B089C2': 'Zyptonite', 'F023B9': 'IEEE Registration Authority', 'FC4DD4': 'Universal Global Scientific Industrial Co., Ltd.', 'A4F4C2': 'VNPT TECHNOLOGY', '8C147D': 'IEEE Registration Authority', '30074D': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '1C1FD4': 'LifeBEAM Technologies LTD', '009AD2': 'Cisco Systems, Inc', '447F77': 'Connected Home', 'E8B6C2': 'Juniper Networks', '947BE7': 'Samsung Electronics Co.,Ltd', '5092B9': 'Samsung Electronics Co.,Ltd', 'DC74A8': 'Samsung Electronics Co.,Ltd', 'E83935': 'Hewlett Packard', '00180A': 'Cisco Meraki', '5C6A80': 'Zyxel Communications Corporation', 'D860B3': 'Guangdong Global Electronic Technology CO.,LTD', '64351C': 'e-CON SYSTEMS INDIA PVT LTD', '60BA18': 'nextLAP GmbH', '44AA50': 'Juniper Networks', '84CD62': 'ShenZhen IDWELL Technology CO.,Ltd', 'A8D579': 'Beijing Chushang Science and Technology Co.,Ltd', '4448C1': 'Hewlett Packard Enterprise', '481063': 'NTT Innovation Institute, Inc.', 'A08E78': 'Sagemcom Broadband SAS', '88D50C': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D428D5': 'TCT mobile ltd', '9CAF6F': 'ITEL MOBILE LIMITED', 'FC539E': 'Shanghai Wind Technologies Co.,Ltd', '605317': 'Sandstone Technologies', '907065': 'Texas Instruments', '18A958': 'PROVISION THAI CO., LTD.', '74C9A3': 'Fiberhome Telecommunication Technologies Co.,LTD', 'EC8A4C': 'zte corporation', 'D45F25': 'Shenzhen YOUHUA Technology Co., Ltd', '40C8CB': 'AM Telecom co., Ltd.', '2CABEB': 'Cisco Systems, Inc', 'C83A6B': 'Roku, Inc', 'B4C6F8': 'Axilspot Communication', '9CE951': 'Shenzhen Sang Fei Consumer Communications Ltd., Co.', 'B8D50B': 'Sunitec Enterprise Co.,Ltd', 'BC66DE': 'Shadow Creator Information Technology Co.,Ltd.', '1868CB': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', 'C4AE12': 'Samsung Electronics Co.,Ltd', '001FA4': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'D4DCCD': 'Apple, Inc.', '484BAA': 'Apple, Inc.', 'DCA904': 'Apple, Inc.', '6CAB31': 'Apple, Inc.', '4C74BF': 'Apple, Inc.', '04946B': 'TECNO MOBILE LIMITED', 
'A04C5B': 'Shenzhen TINNO Mobile Technology Corp.', '488803': 'ManTechnology Inc.', 'B436E3': 'KBVISION GROUP', '94D299': 'Techmation Co.,Ltd.', '341A35': 'Fiberhome Telecommunication Technologies Co.,LTD', '2C029F': '3ALogics', '64D154': 'Routerboard.com', '58D9D5': 'Tenda Technology Co.,Ltd.Dongguan branch', '6C4B90': 'LiteON', '00050F': 'Tanaka S/S Ltd.', '989E63': 'Apple, Inc.', '886B6E': 'Apple, Inc.', 'F4E4AD': 'zte corporation', '28FF3E': 'zte corporation', 'B8D7AF': 'Murata Manufacturing Co., Ltd.', 'D4AE05': 'Samsung Electronics Co.,Ltd', 'E048AF': 'Premietech Limited', '2C3311': 'Cisco Systems, Inc', '5082D5': 'Apple, Inc.', 'F0EE10': 'Samsung Electronics Co.,Ltd', 'C4700B': 'GUANGZHOU CHIP TECHNOLOGIES CO.,LTD', '3CA067': 'Liteon Technology Corporation', 'BC024A': 'HMD Global Oy', '949901': 'Shenzhen YITOA Digital Appliance CO.,LTD', 'F85971': 'Intel Corporate', '1005CA': 'Cisco Systems, Inc', '7894B4': 'Sercomm Corporation.', '443708': 'MRV Comunications', '285F2F': 'RNware Co.,Ltd.', '500FF5': 'Tenda Technology Co.,Ltd.Dongguan branch', 'BC452E': 'Knowledge Development for POF S.L.', 'DCC64B': 'HUAWEI TECHNOLOGIES CO.,LTD', '043389': 'HUAWEI TECHNOLOGIES CO.,LTD', '00A068': 'BHP LIMITED', '703ACB': 'Google, Inc.', '706DEC': 'Wifi-soft LLC', 'B0C205': 'BIONIME', '94F551': 'Cadi Scientific Pte Ltd', '105AF7': 'ADB Italia ', 'B81DAA': 'LG Electronics (Mobile Communications)', '00E400': 'Sichuan Changhong Electric Ltd.', '2C55D3': 'HUAWEI TECHNOLOGIES CO.,LTD', '00C024': 'EDEN SISTEMAS DE COMPUTACAO SA', '7C4685': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '1C1EE3': 'Hui Zhou Gaoshengda Technology Co.,LTD', '44032C': 'Intel Corporate', '7868F7': 'YSTen Technology Co.,Ltd', '004BF3': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '08CCA7': 'Cisco Systems, Inc', '0896AD': 'Cisco Systems, Inc', '0823B2': 'vivo Mobile Communication Co., Ltd.', '88C3B3': 'SOVICO', 'E05124': 'NXP Semiconductors', '001DA3': 'SabiOso', '542F8A': 'TELLESCOM INDUSTRIA E COMERCIO EM TELECOMUNICACAO ', '6014B3': 'CyberTAN Technology Inc.', '105611': 'ARRIS Group, Inc.', '347877': 'O-Net Communications (Shenzhen) Limited', '0020CC': 'DIGITAL SERVICES, LTD.', '689FF0': 'zte corporation', '5CAF06': 'LG Electronics (Mobile Communications)', '00179B': 'CHANT SINCERE CO.,LTD', '1C398A': 'Fiberhome Telecommunication Technologies Co.,LTD', 'E865D4': 'Tenda Technology Co.,Ltd.Dongguan branch', '24D51C': 'Zhongtian broadband technology co., LTD ', 'EC43F6': 'Zyxel Communications Corporation', '60C658': 'PHYTRONIX Co.,Ltd.', 'FCB58A': 'Wapice Ltd.', 'A462DF': 'DS Global. Co., LTD', '4C1694': 'shenzhen sibituo Technology Co., Ltd', 'C81451': 'HUAWEI TECHNOLOGIES CO.,LTD', '44D437': 'Inteno Broadband Technology AB', 'ECE154': 'Beijing Unisound Information Technology Co.,Ltd.', '6C160E': 'ShotTracker', '803A0A': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '0C73BE': 'Dongguan Haimai Electronie Technology Co.,Ltd', '286F7F': 'Cisco Systems, Inc', 'F0C850': 'HUAWEI TECHNOLOGIES CO.,LTD', '00014F': 'Adtran Inc', '285261': 'Cisco Systems, Inc', 'C8AA55': 'Hunan Comtom Electronic Incorporated Co.,Ltd', '20780B': 'Delta Faucet Company', '8809AF': 'Masimo Corporation', '2CD02D': 'Cisco Systems, Inc', '9CCC83': 'Juniper Networks', '2C6373': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '24A7DC': 'BSkyB Ltd', '64DBA0': 'Select Comfort', 'F8983A': 'Leeman International (HongKong) Limited', '4CECEF': 'Soraa, Inc.', '1CEFCE': 'bebro electronic GmbH', '98B6E9': 'Nintendo Co.,Ltd', 'F015B9': 'PlayFusion Limited', '64B0A6': 'Apple, Inc.', '7C04D0': 'Apple, Inc.', '84FCAC': 'Apple, Inc.', 'DC0C5C': 'Apple, Inc.', '70700D': 'Apple, Inc.', '30E171': 'Hewlett Packard', '186590': 'Apple, Inc.', 'F86214': 'Apple, Inc.', '784F43': 'Apple, Inc.', '404D7F': 'Apple, Inc.', '001D72': 'Wistron Corporation', 'D8197A': 'Nuheara Ltd', '4C38D5': 'MITAC COMPUTING TECHNOLOGY CORPORATION', '54B56C': "Xi'an NovaStar Tech Co., Ltd", '344CC8': 'Echodyne Corp', '64136C': 'zte corporation', '04B648': 'ZENNER', '98F199': 'NEC Platforms, Ltd.', '1840A4': 'Shenzhen Trylong Smart Science and Technology Co., Ltd.', '1C48CE': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'C80CC8': 'HUAWEI TECHNOLOGIES CO.,LTD', '0425C5': 'HUAWEI TECHNOLOGIES CO.,LTD', '603E7B': 'Gafachi, Inc.', '4C7487': 'Leader Phone Communication Technology Co., Ltd.', 'AC83F3': 'AMPAK Technology, Inc.', 'CC8CDA': 'Shenzhen Wei Da Intelligent Technology Go.,Ltd', 'D436DB': 'Jiangsu Toppower Automotive Electronics Co., Ltd', '2CDCAD': 'Wistron Neweb Corporation', '6C5C14': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'E80945': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', 'B0A2E7': 'Shenzhen TINNO Mobile Technology Corp.', '7C2587': 'chaowifi.com', '2C2131': 'Juniper Networks', '00501E': 'Grass Valley, A Belden Brand', 'EC0D9A': 'Mellanox Technologies, Inc.', '90D7BE': 'Wavelab Global Inc.', '244E7B': 'IEEE Registration Authority', '30AEA4': 'Espressif Inc.', '206C8A': 'Aerohive Networks Inc.', '3CFA43': 'HUAWEI TECHNOLOGIES CO.,LTD', '145F94': 'HUAWEI TECHNOLOGIES CO.,LTD', '001F82': 'Cal-Comp Electronics & Communications Company Ltd.', '883C1C': 'MERCURY CORPORATION', '002144': 'SS Telecoms', '006BF1': 'Cisco Systems, Inc', '2834A2': 'Cisco Systems, Inc', '7823AE': 'ARRIS Group, Inc.', '20719E': 'SF Technology Co.,Ltd', '2CC260': 'Oracle Corporation ', '3C3F51': '2CRSI', '3C2AF4': 'Brother Industries, LTD.', 'C0854C': 'Ragentek Technology Group', '0024AC': 'Hangzhou DPtech Technologies Co., Ltd.', '50584F': 'waytotec,Inc.', '085DDD': 'MERCURY CORPORATION', '8C60E7': 'MPGIO CO.,LTD', 'CC9470': 'Kinestral Technologies, Inc.', 'B439D6': 'ProCurve Networking by HP', '34F39A': 'Intel Corporate', 'D816C1': 'DEWAV (HK) ELECTRONICS LIMITED', 'CC61E5': 'Motorola Mobility LLC, a Lenovo Company', '8C8ABB': 'Beijing Orient View Technology Co., Ltd.', '00039B': 'NetChip Technology, Inc.', '44D9E7': 'Ubiquiti Networks Inc.', '24A43C': 'Ubiquiti Networks Inc.', '9C8BA0': 'Apple, Inc.', 'CC088D': 'Apple, Inc.', '38A4ED': 'Xiaomi Communications Co Ltd', 'B89919': '7signal Solutions, Inc', '40FE0D': 'MAXIO', 'AC64DD': 'IEEE Registration Authority', '94B819': 'Nokia', '787D48': 'ITEL MOBILE LIMITED', '8871E5': 'Amazon Technologies Inc.', 'BC39D9': 'Z-TEC', '609AC1': 'Apple, Inc.', '748D08': 'Apple, Inc.', '00B0EE': 'Ajile Systems, Inc.', '0418D6': 'Ubiquiti Networks Inc.', '20DBAB': 'Samsung Electronics Co., Ltd.', '383A21': 'IEEE Registration Authority', 'D8380D': 'SHENZHEN IP-COM Network Co.,Ltd', '88AD43': 'PEGATRON CORPORATION', 'B4EFFA': 'Lemobile Information Technology (Beijing) Co., Ltd.', '6C71BD': 'EZELINK TELECOM', '60EFC6': 'Shenzhen Chima Technologies Co Limited', '001FC6': 'ASUSTek COMPUTER INC.', 'B0C128': 'Adler ELREHA GmbH', '3087D9': 'Ruckus Wireless', 'FCCAC4': 'LifeHealth, LLC', 'F0D9B2': 'EXO S.A.', 'E4C801': 'BLU Products Inc', 'F09838': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C80E14': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', 'AC63BE': 'Amazon Technologies Inc.', 'F81D78': 'IEEE Registration Authority', '38F7B2': 'SEOJUN ELECTRIC', '101250': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '7802B7': 'ShenZhen Ultra Easy Technology CO.,LTD', '646184': 'VELUX', 'E8E5D6': 'Samsung Electronics Co.,Ltd', 'C87E75': 'Samsung Electronics Co.,Ltd', '00265F': 'Samsung Electronics Co.,Ltd', '00233A': 'Samsung Electronics Co.,Ltd', '086A0A': 'ASKEY COMPUTER CORP', '98E7F4': 'Hewlett Packard', '0007AB': 'Samsung Electronics Co.,Ltd', '002486': 'DesignArt Networks', '002478': 'Mag Tech Electronics Co Limited', '382DD1': 'Samsung Electronics Co.,Ltd', '001B2C': 'ATRON electronic GmbH', '9034FC': 'Hon Hai Precision Ind. Co.,Ltd.', '001427': 'JazzMutant', '001E84': 'Pika Technologies Inc.', '10DDB1': 'Apple, Inc.', '002329': 'DDRdrive LLC', '0026AD': 'Arada Systems, Inc.', 'FC1F19': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '840B2D': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '206432': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', 'B407F9': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '889FFA': 'Hon Hai Precision Ind. Co.,Ltd.', '8C7CB5': 'Hon Hai Precision Ind. Co.,Ltd.', 'C44619': 'Hon Hai Precision Ind. Co.,Ltd.', '506313': 'Hon Hai Precision Ind. 
Co.,Ltd.', '60D819': 'Hon Hai Precision Ind. Co.,Ltd.', 'F82FA8': 'Hon Hai Precision Ind. Co.,Ltd.', '0C84DC': 'Hon Hai Precision Ind. Co.,Ltd.', '00166C': 'Samsung Electronics Co.,Ltd', '181EB0': 'Samsung Electronics Co.,Ltd', '247F20': 'Sagemcom Broadband SAS', 'E8039A': 'Samsung Electronics Co.,Ltd', '30CDA7': 'Samsung Electronics Co.,Ltd', '001247': 'Samsung Electronics Co.,Ltd', '001599': 'Samsung Electronics Co.,Ltd', '0012FB': 'Samsung Electronics Co.,Ltd', 'D0667B': 'Samsung Electronics Co.,Ltd', 'B85E7B': 'Samsung Electronics Co.,Ltd', 'E492FB': 'Samsung Electronics Co.,Ltd', '6CB7F4': 'Samsung Electronics Co.,Ltd', '2C4401': 'Samsung Electronics Co.,Ltd', 'B8D9CE': 'Samsung Electronics Co.,Ltd', '1C66AA': 'Samsung Electronics Co.,Ltd', '3C8BFE': 'Samsung Electronics Co.,Ltd', 'D4E8B2': 'Samsung Electronics Co.,Ltd', '1489FD': 'Samsung Electronics Co.,Ltd', 'BC851F': 'Samsung Electronics Co.,Ltd', '0015B9': 'Samsung Electronics Co.,Ltd', '002491': 'Samsung Electronics Co.,Ltd', '002339': 'Samsung Electronics Co.,Ltd', '5001BB': 'Samsung Electronics Co.,Ltd', 'C40142': 'MaxMedia Technology Limited', '8430E5': 'SkyHawke Technologies, LLC', '1C77F6': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '58E326': 'Compass Technologies Inc.', '001B2A': 'Cisco Systems, Inc', '749DDC': '2Wire Inc', '14DDE5': 'MPMKVVCL', '001A09': 'Wayfarer Transit Systems Ltd', '742344': 'Xiaomi Communications Co Ltd', '54880E': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'F025B7': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'F04347': 'HUAWEI TECHNOLOGIES CO.,LTD', '9CB2B2': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A8C83A': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BC72B1': 'Samsung Electronics Co.,Ltd', '78F7BE': 'Samsung Electronics Co.,Ltd', '684898': 'Samsung Electronics Co.,Ltd', '3423BA': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '003676': 'ARRIS Group, Inc.', 'FC8E7E': 'ARRIS Group, Inc.', 'FC6FB7': 'ARRIS Group, Inc.', 'D42C0F': 'ARRIS Group, Inc.', '400E85': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'C8BA94': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '843838': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'A055DE': 'ARRIS Group, Inc.', '80F503': 'ARRIS Group, Inc.', '44AAF5': 'ARRIS Group, Inc.', 'E09DFA': 'Wanan Hongsheng Electronic Co.Ltd', '5C3C27': 'Samsung Electronics Co.,Ltd', '70A84C': 'MONAD., Inc.', '84C7EA': 'Sony Mobile Communications Inc', '24E43F': 'Wenzhou Kunmei Communication Technology Co.,Ltd.', '287AEE': 'ARRIS Group, Inc.', '88797E': 'Motorola Mobility LLC, a Lenovo Company', '305890': 'Frontier Silicon Ltd', '708BCD': 'ASUSTek COMPUTER INC.', '00258B': 'Mellanox Technologies, Inc.', '00562B': 'Cisco Systems, Inc', 'E8FD90': 'Turbostor', '2CAC44': 'CONEXTOP', 'D013FD': 'LG Electronics (Mobile Communications)', 'BC644B': 'ARRIS Group, Inc.', '606405': 'Texas Instruments', '1899F5': 'Sichuan Changhong Electric Ltd.', '0025C3': '21168', '000F57': 'CABLELOGIC Co., Ltd.', '000342': 'Nortel Networks', 'A48269': 'Datrium, Inc.', '10E68F': 'KWANGSUNG ELECTRONICS KOREA CO.,LTD.', '4CFACA': 'Cambridge Industries(Group) Co.,Ltd.', '18ABF5': 'Ultra Electronics Electrics', 'B03EB0': 'MICRODIA Ltd.', '001591': 'RLW Inc.', '00182E': 'XStreamHD', '001283': 'Nortel Networks', '0011F9': 'Nortel Networks', '001158': 'Nortel Networks', '000F6A': 'Nortel Networks', '000E62': 'Nortel Networks', '000CF8': 'Nortel Networks', '0026F1': 'ProCurve Networking by HP', '380DD4': 'Primax Electronics Ltd.', '98FDB4': 'Primax Electronics Ltd.', 'D8C46A': 'Murata Manufacturing Co., Ltd.', 'D8FB68': 'Cloud Corner Ltd.', '685388': 'P&S Technology', 
'982F3C': 'Sichuan Changhong Electric Ltd.', '14C1FF': 'ShenZhen QianHai Comlan communication Co.,LTD', '000417': 'ELAU AG', 'ECFAAA': 'The IMS Company', 'F00786': 'Shandong Bittel Electronics Co., Ltd', '00D0F6': 'Nokia', '54A619': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '000997': 'Nortel Networks', '001CEB': 'Nortel Networks', '001C17': 'Nortel Networks', '001A8F': 'Nortel Networks', '0017D1': 'Nortel Networks', '888322': 'Samsung Electronics Co.,Ltd', 'E89309': 'Samsung Electronics Co.,Ltd', '0014C7': 'Nortel Networks', '001DAF': 'Nortel Networks', '88A6C6': 'Sagemcom Broadband SAS', '94D469': 'Cisco Systems, Inc', '882BD7': 'ADDÉNERGIE TECHNOLOGIES', '0090CC': 'PLANEX COMMUNICATIONS INC.', '2057AF': 'Shenzhen FH-NET OPTOELECTRONICS CO.,LTD', '54DC1D': 'Yulong Computer Telecommunication Scientific (Shenzhen) Co.,Ltd', 'ACA213': 'Shenzhen Bilian electronic CO.,LTD', '3C3300': 'Shenzhen Bilian electronic CO.,LTD', '6CD032': 'LG Electronics', '3CBDD8': 'LG ELECTRONICS INC', '344DF7': 'LG Electronics (Mobile Communications)', '583F54': 'LG Electronics (Mobile Communications)', '0022CF': 'PLANEX COMMUNICATIONS INC.', 'E417D8': '8BITDO TECHNOLOGY HK LIMITED', '9CD332': 'PLC Technology Ltd', '38F8CA': 'OWIN Inc.', '44334C': 'Shenzhen Bilian electronic CO.,LTD', '64899A': 'LG Electronics (Mobile Communications)', '002105': 'Alcatel-Lucent IPD', '001BC5': 'IEEE Registration Authority', '48DF37': 'Hewlett Packard Enterprise', 'C0E42D': 'TP-LINK TECHNOLOGIES CO.,LTD.', '8CA6DF': 'TP-LINK TECHNOLOGIES CO.,LTD.', '8416F9': 'TP-LINK TECHNOLOGIES CO.,LTD.', '18D6C7': 'TP-LINK TECHNOLOGIES CO.,LTD.', '78C3E9': 'Samsung Electronics Co.,Ltd', '8C1ABF': 'Samsung Electronics Co.,Ltd', '30CBF8': 'Samsung Electronics Co.,Ltd', 'A0CBFD': 'Samsung Electronics Co.,Ltd', 'E45D75': 'Samsung Electronics Co.,Ltd', '00E04D': 'INTERNET INITIATIVE JAPAN, INC', 'F8A9D0': 'LG Electronics (Mobile Communications)', 'CCFA00': 'LG Electronics (Mobile Communications)', '74A722': 'LG Electronics (Mobile Communications)', 'F01C13': 'LG Electronics (Mobile Communications)', '58FCDB': 'IEEE Registration Authority', 'B0C5CA': 'IEEE Registration Authority', '7419F8': 'IEEE Registration Authority', 'A816B2': 'LG Electronics (Mobile Communications)', '64BC0C': 'LG Electronics (Mobile Communications)', '90C682': 'IEEE Registration Authority', 'C01ADA': 'Apple, Inc.', '2C600C': 'QUANTA COMPUTER INC.', '000031': 'QPSX COMMUNICATIONS, LTD.', '000E1E': 'QLogic Corporation', '0014D1': 'TRENDnet, Inc.', '00238B': 'QUANTA COMPUTER INC.', '001E68': 'QUANTA COMPUTER INC.', 'CC52AF': 'Universal Global Scientific Industrial Co., Ltd.', '001C14': 'VMware, Inc.', '005056': 'VMware, Inc.', '00121C': 'PARROT SA', '9003B7': 'PARROT SA', '208756': 'SIEMENS AG', '74B472': 'CIESSE', 'FCF152': 'Sony Corporation', '483C0C': 'HUAWEI TECHNOLOGIES CO.,LTD', '309BAD': 'BBK EDUCATIONAL ELECTRONICS CORP.,LTD.', '001BB1': 'Wistron Neweb Corporation', '0080F7': 'Zenith Electronics Corporation', 'BC307D': 'Wistron Neweb Corporation', '48A9D2': 'Wistron Neweb Corporation', '80EA23': 'Wistron Neweb Corporation', '002713': 'Universal Global Scientific Industrial Co., Ltd.', 'BC307E': 'Wistron Neweb Corporation', '08952A': 'Technicolor CH USA Inc.', '4432C8': 'Technicolor CH USA Inc.', '38A28C': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', 'B4A5EF': 'Sercomm Corporation.', '849D64': 'SMC Corporation', '0010C1': 'OI ELECTRIC CO.,LTD', '28BE9B': 'Technicolor CH USA Inc.', 'FC528D': 'Technicolor CH USA Inc.', '506583': 'Texas Instruments', 'B09122': 'Texas Instruments', 
'FC51A4': 'ARRIS Group, Inc.', '9857D3': 'HON HAI-CCPBG PRECISION IND.CO.,LTD.', 'FCF528': 'Zyxel Communications Corporation', '00A0C5': 'Zyxel Communications Corporation', 'A09E1A': 'Polar Electro Oy', '1CD6BD': 'LEEDARSON LIGHTING CO., LTD.', 'D0D94F': 'IEEE Registration Authority', '001E04': 'Hanson Research Corporation', '60C0BF': 'ON Semiconductor', 'AC0481': 'Jiangsu Huaxing Electronics Co., Ltd.', '68B35E': 'Shenzhen Neostra Technology Co.Ltd', '408805': 'Motorola Mobility LLC, a Lenovo Company', '24E271': 'Qingdao Hisense Communications Co.,Ltd.', 'BC6010': 'Qingdao Hisense Communications Co.,Ltd.', 'D0FCCC': 'Samsung Electronics Co.,Ltd', '98398E': 'Samsung Electronics Co.,Ltd', '44D1FA': 'Shenzhen Yunlink Technology Co., Ltd', 'F0F644': 'Whitesky Science & Technology Co.,Ltd.', '20F17C': 'HUAWEI TECHNOLOGIES CO.,LTD', '346AC2': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C41CFF': 'Vizio, Inc', 'C09727': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'DC293A': 'Shenzhen Nuoshi Technology Co., LTD.', '7C6AF3': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'E46251': 'HAO CHENG GROUP LIMITED', '40562D': 'Smartron India Pvt ltd', '3876D1': 'Euronda SpA', 'C4693E': 'Turbulence Design Inc.', '009569': 'LSD Science and Technology Co.,Ltd.', 'B0CF4D': 'MI-Zone Technology Ireland', '289AFA': 'TCT mobile ltd', '001A34': 'Konka Group Co., Ltd.', '0011FC': 'HARTING Electronics GmbH', '002389': 'Hangzhou H3C Technologies Co., Limited', '3CE5A6': 'Hangzhou H3C Technologies Co., Limited', '5CDD70': 'Hangzhou H3C Technologies Co., Limited', '3C8C40': 'Hangzhou H3C Technologies Co., Limited', 'A067BE': 'Sicon srl', 'D8209F': 'Cubro Acronet GesmbH', '8C7716': 'LONGCHEER TELECOMMUNICATION LIMITED', '68FB7E': 'Apple, Inc.', '84A134': 'Apple, Inc.', 'A0D385': 'AUMA Riester GmbH & Co. KG', '1414E6': 'Ningbo Sanhe Digital Co.,Ltd', '002582': 'Maksat Technologies (P) Ltd', '0C5101': 'Apple, Inc.', '2CF0A2': 'Apple, Inc.', '48C049': 'Broad Telecom SA', 'AC6FBB': 'TATUNG Technology Inc.', '001C41': 'scemtec Transponder Technology GmbH', '146308': 'JABIL CIRCUIT (SHANGHAI) LTD.', '001E25': 'INTEK DIGITAL', '00E0CF': 'INTEGRATED DEVICE', '904D4A': 'Sagemcom Broadband SAS', '044E5A': 'ARRIS Group, Inc.', '0060B1': 'Input/Output, Inc.', '547F54': 'INGENICO', '6C2483': 'Microsoft Mobile Oy', '6891D0': 'IEEE Registration Authority', 'E04F43': 'Universal Global Scientific Industrial Co., Ltd.', '38700C': 'ARRIS Group, Inc.', '000E2E': 'Edimax Technology Co. Ltd.', '00065F': 'ECI Telecom Ltd.', '00208F': 'ECI Telecom Ltd.', '844076': 'Drivenets', '001CD7': 'Harman/Becker Automotive Systems GmbH', '003A7D': 'Cisco Systems, Inc', '90C7D8': 'zte corporation', '00185C': 'EDSLAB Technologies', '001A45': 'GN Netcom A/S', '002088': 'GLOBAL VILLAGE COMMUNICATION', '541379': 'Hon Hai Precision Ind. Co.,Ltd.',
'001921': 'Elitegroup Computer Systems Co.,Ltd.', '0016EC': 'Elitegroup Computer Systems Co.,Ltd.', '000795': 'Elitegroup Computer Systems Co.,Ltd.', 'FC0F4B': 'Texas Instruments', 'D4883F': 'HDPRO CO., LTD.', '1088CE': 'Fiberhome Telecommunication Technologies Co.,LTD', '60B617': 'Fiberhome Telecommunication Technologies Co.,LTD', 'DC9C9F': 'Shenzhen YOUHUA Technology Co., Ltd', '74DFBF': 'Liteon Technology Corporation', 'F03E90': 'Ruckus Wireless', 'D8D723': 'IDS, Inc', '84AD58': 'HUAWEI TECHNOLOGIES CO.,LTD', '58605F': 'HUAWEI TECHNOLOGIES CO.,LTD', '00A0F4': 'GE', 'AC0D1B': 'LG Electronics (Mobile Communications)', 'F0D1B8': 'LEDVANCE', '986D35': 'IEEE Registration Authority', '88795B': 'Konka Group Co., Ltd.', '081F71': 'TP-LINK TECHNOLOGIES CO.,LTD.', '5CCA1A': 'Microsoft Mobile Oy', 'FC2FAA': 'Nokia', 'B07E70': 'Zadara Storage Ltd.', '0080B1': 'SOFTCOM A/S', '202DF8': 'Digital Media Cartridge Ltd.', '10D0AB': 'zte corporation', '0004C6': 'YAMAHA MOTOR CO.,LTD', '18A3E8': 'Fiberhome Telecommunication Technologies Co.,LTD', '741E93': 'Fiberhome Telecommunication Technologies Co.,LTD', '202D07': 'Samsung Electronics Co.,Ltd', 'D8803C': 'Anhui Huami Information Technology Company Limited', '0034DA': 'LG Electronics (Mobile Communications)', '3810D5': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '18C501': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '00A0B8': 'NetApp', '00C88B': 'Cisco Systems, Inc', '24C3F9': 'Securitas Direct AB', '2C21D7': 'IMAX Corporation', '0009D2': 'Mai Logic Inc.', '006016': 'CLARIION', '981FB1': 'Shenzhen Lemon Network Technology Co.,Ltd', '0C5A9E': 'Wi-SUN Alliance', 'B44BD2': 'Apple, Inc.', 'DC415F': 'Apple, Inc.', '641225': 'Cisco Systems, Inc', '7864E6': 'Green Motive Technology Limited', '3CBEE1': 'NIKON CORPORATION', '102AB3': 'Xiaomi Communications Co Ltd', '40D357': 'Ison Technology Co., Ltd.', 'A41588': 'ARRIS Group, Inc.', 'F45C89': 'Apple, Inc.', '20768F': 'Apple, Inc.', '9C5CF9': 'Sony Mobile Communications Inc', '0011D1': 'Soft Imaging System GmbH', '98D686': 'Chyi Lee industry Co., ltd.', '8CC661': 'Current, powered by GE', '88A084': 'Formation Data Systems', 'E8B2AC': 'Apple, Inc.', 'E49A79': 'Apple, Inc.', '30A9DE': 'LG Innotek', 'F01B6C': 'vivo Mobile Communication Co., Ltd.', 'A0B9ED': 'Skytap', '94C960': 'Zhongshan B&T technology.co.,ltd', '74C330': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '403F8C': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'DCB3B4': 'Honeywell Environmental & Combustion Controls (Tianjin) Co., Ltd.', '001D3B': 'Nokia Danmark A/S', '001DFD': 'Nokia Danmark A/S', '001E3B': 'Nokia Danmark A/S', '001EA4': 'Nokia Danmark A/S', '0026CC': 'Nokia Danmark A/S', '000EED': 'Nokia Danmark A/S', '4C2578': 'Nokia Corporation', 'BCC6DB': 'Nokia Corporation', '60A8FE': 'Nokia', '00119F': 'Nokia Danmark A/S', '001A16': 'Nokia Danmark A/S', '001A89': 'Nokia Danmark A/S', '001ADC': 'Nokia Danmark A/S', '0025CF': 'Nokia Danmark A/S', '0021AB': 'Nokia Danmark A/S', '001FDE': 'Nokia Danmark A/S', '001FDF': 'Nokia Danmark A/S', '547975': 'Nokia Corporation', 'A87B39': 'Nokia Corporation', '00247C': 'Nokia Danmark A/S', '002266': 'Nokia Danmark A/S', '0021FE': 'Nokia Danmark A/S', 'C477AB': 'Beijing ASU Tech Co.,Ltd', '000BCA': 'DATAVAN TC', '702559': 'CyberTAN Technology Inc.', '607EDD': 'Microsoft Mobile Oy', 'A8A089': 'Tactical Communications', '48365F': 'Wintecronics Ltd.', '001D20': 'Comtrend Corporation', '08373D': 'Samsung Electronics Co.,Ltd', '0C75BD': 'Cisco Systems, Inc', '300D43': 'Microsoft Mobile Oy', '00000E': 'FUJITSU LIMITED',
'000B5D': 'FUJITSU LIMITED', 'C488E5': 'Samsung Electronics Co.,Ltd', '080581': 'Roku, Inc.', '000DF3': 'Asmax Solutions', '80ACAC': 'Juniper Networks', '000DB6': 'Broadcom', '000AF7': 'Broadcom', 'D40129': 'Broadcom', '001D00': 'Brivo Systems, LLC', '0020D6': 'Breezecom, Ltd.', '00E063': 'Cabletron Systems, Inc.', 'FCC734': 'Samsung Electronics Co.,Ltd', '8425DB': 'Samsung Electronics Co.,Ltd', 'B0EC71': 'Samsung Electronics Co.,Ltd', 'E458B8': 'Samsung Electronics Co.,Ltd', '088C2C': 'Samsung Electronics Co.,Ltd', '64B853': 'Samsung Electronics Co.,Ltd', '389496': 'Samsung Electronics Co.,Ltd', '5056BF': 'Samsung Electronics Co.,Ltd', '90F1AA': 'Samsung Electronics Co.,Ltd', '1077B1': 'Samsung Electronics Co.,Ltd', '001FC7': 'Casio Hitachi Mobile Communications Co., Ltd.', 'A49A58': 'Samsung Electronics Co.,Ltd', '08EE8B': 'Samsung Electronics Co.,Ltd', '0CA42A': 'OB Telecom Electronic Technology Co., Ltd', '74458A': 'Samsung Electronics Co.,Ltd', '5CDC96': 'Arcadyan Technology Corporation', '743170': 'Arcadyan Technology Corporation', '001A2A': 'Arcadyan Technology Corporation', '88252C': 'Arcadyan Technology Corporation', '40BA61': 'ARIMA Communications Corp.', '0011F5': 'ASKEY COMPUTER CORP', '0016E3': 'ASKEY COMPUTER CORP', 'E839DF': 'ASKEY COMPUTER CORP', '1CC63C': 'Arcadyan Technology Corporation', '1883BF': 'Arcadyan Technology Corporation', '68ED43': 'BlackBerry RTS', '70AAB2': 'BlackBerry RTS', '00146C': 'NETGEAR', '001E2A': 'NETGEAR', '00184D': 'NETGEAR', '00040E': 'AVM GmbH', '9CC7A6': 'AVM GmbH', 'A06391': 'NETGEAR', '20E52A': 'NETGEAR', '4494FC': 'NETGEAR', '200CC8': 'NETGEAR', 'C4473F': 'HUAWEI TECHNOLOGIES CO.,LTD', '744401': 'NETGEAR', 'E091F5': 'NETGEAR', '000F86': 'BlackBerry RTS', '0024D2': 'ASKEY COMPUTER CORP', 'B4EEB4': 'ASKEY COMPUTER CORP', 'E46449': 'ARRIS Group, Inc.', '40FC89': 'ARRIS Group, Inc.', '2C9E5F': 'ARRIS Group, Inc.', '002636': 'ARRIS Group, Inc.', '001CC1': 'ARRIS Group, Inc.', '001E5A': 'ARRIS Group, Inc.', '001371': 'ARRIS Group, Inc.', '0023EE': 'ARRIS Group, Inc.', '001ADE': 'ARRIS Group, Inc.', '745612': 'ARRIS Group, Inc.', '0050E3': 'ARRIS Group, Inc.', '002136': 'ARRIS Group, Inc.', '001626': 'ARRIS Group, Inc.', '0019A6': 'ARRIS Group, Inc.', 'E8C74F': 'Liteon Technology Corporation', 'D05349': 'Liteon Technology Corporation', '000BA2': 'Sumitomo Electric Industries,Ltd', '0008F6': 'Sumitomo Electric Industries,Ltd', '00005F': 'Sumitomo Electric Industries,Ltd', 'E8F724': 'Hewlett Packard Enterprise', '5CB524': 'Sony Mobile Communications Inc', '90C115': 'Sony Mobile Communications Inc', 'D05162': 'Sony Mobile Communications Inc', '18002D': 'Sony Mobile Communications Inc', '280DFC': 'Sony Interactive Entertainment Inc.', '001311': 'ARRIS Group, Inc.', 'D0DF9A': 'Liteon Technology Corporation', '1C659D': 'Liteon Technology Corporation', '3010B3': 'Liteon Technology Corporation', '701A04': 'Liteon Technology Corporation', '48D224': 'Liteon Technology Corporation', '20689D': 'Liteon Technology Corporation', '0024EF': 'Sony Mobile Communications Inc', '0025E7': 'Sony Mobile Communications Inc', '58170C': 'Sony Mobile Communications Inc', '0016B8': 'Sony Mobile Communications Inc', '7CBFB1': 'ARRIS Group, Inc.', '080046': 'Sony Corporation', 'ECF00E': 'AboCom', '00E098': 'AboCom', '74DAEA': 'Texas Instruments', '948815': 'Infinique Worldwide Inc', 'D0E44A': 'Murata Manufacturing Co., Ltd.', '384FF0': 'AzureWave Technology Inc.', 'E874E6': 'ADB Broadband Italia', '0020E0': 'Actiontec Electronics, Inc', '002662': 'Actiontec Electronics, Inc', 
'002553': 'ADB Broadband Italia', '00193E': 'ADB Broadband Italia', '000827': 'ADB Broadband Italia', '742F68': 'AzureWave Technology Inc.', '4C14A3': 'TCL Technoly Electronics (Huizhou) Co., Ltd.', '4CB0E8': 'Beijing RongZhi xinghua technology co., LTD', 'D887D5': 'Leadcore Technology CO.,LTD', '00F28B': 'Cisco Systems, Inc', '34E6AD': 'Intel Corporate', '081196': 'Intel Corporate', '183DA2': 'Intel Corporate', '809B20': 'Intel Corporate', '002314': 'Intel Corporate', '340286': 'Intel Corporate', '001CBF': 'Intel Corporate', 'B4B676': 'Intel Corporate', '3CA9F4': 'Intel Corporate', 'B88A60': 'Intel Corporate', '78FF57': 'Intel Corporate', '9C4E36': 'Intel Corporate', '3413E8': 'Intel Corporate', '002710': 'Intel Corporate', 'A48E0A': 'DeLaval International AB', 'AC2B6E': 'Intel Corporate', '9C3583': 'Nipro Diagnostics, Inc', 'C06118': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'B82A72': 'Dell Inc.', 'F8E079': 'Motorola Mobility LLC, a Lenovo Company', 'CCC3EA': 'Motorola Mobility LLC, a Lenovo Company', '40786A': 'Motorola Mobility LLC, a Lenovo Company', '0019D1': 'Intel Corporate', '0019D2': 'Intel Corporate', '001B21': 'Intel Corporate', '18FF0F': 'Intel Corporate', '34DE1A': 'Intel Corporate', 'E8B1FC': 'Intel Corporate', 'CC3D82': 'Intel Corporate', '001F3C': 'Intel Corporate', '002315': 'Intel Corporate', '00166F': 'Intel Corporate', '000A8A': 'Cisco Systems, Inc', '001D09': 'Dell Inc.', '0023AE': 'Dell Inc.', 'BC305B': 'Dell Inc.', '388602': 'Flexoptix GmbH', '4065A3': 'Sagemcom Broadband SAS', 'D02212': 'IEEE Registration Authority', '100723': 'IEEE Registration Authority', 'A44F29': 'IEEE Registration Authority', '74F8DB': 'IEEE Registration Authority', 'A43BFA': 'IEEE Registration Authority', 'B8CA3A': 'Dell Inc.', 'ECF4BB': 'Dell Inc.', 'D4BED9': 'Dell Inc.', '00194B': 'Sagemcom Broadband SAS', '001E74': 'Sagemcom Broadband SAS', '383BC8': '2Wire Inc', '60FE20': '2Wire Inc', '002456': '2Wire Inc', 'C0830A': '2Wire Inc', '00183F': '2Wire Inc', '000D56': 'Dell Inc.', '181E78': 'Sagemcom Broadband SAS', '0037B7': 'Sagemcom Broadband SAS', '0054BD': 'Swelaser AB', '001E4C': 'Hon Hai Precision Ind. Co.,Ltd.', '20BB76': 'COL GIOVANNI PAOLO SpA', '3CDD89': 'SOMO HOLDINGS & TECH. CO.,LTD.', '1801E3': 'Bittium Wireless Ltd', '149182': 'Belkin International Inc.', '18622C': 'Sagemcom Broadband SAS', '3C81D8': 'Sagemcom Broadband SAS', '40F201': 'Sagemcom Broadband SAS', 'D084B0': 'Sagemcom Broadband SAS', 'D8543A': 'Texas Instruments', '649C8E': 'Texas Instruments', '102EAF': 'Texas Instruments', '7C8EE4': 'Texas Instruments', 'B4EED4': 'Texas Instruments', 'D03761': 'Texas Instruments', 'C83E99': 'Texas Instruments', '40984E': 'Texas Instruments', '0017EB': 'Texas Instruments', '0017E6': 'Texas Instruments', 'C4EDBA': 'Texas Instruments', '001832': 'Texas Instruments', '3C2DB7': 'Texas Instruments', '5464D9': 'Sagemcom Broadband SAS', '00195B': 'D-Link Corporation', '000F3D': 'D-Link Corporation', '24DA11': 'NO NDA Inc', 'EC2280': 'D-Link International', '9C8E99': 'Hewlett Packard', '9059AF': 'Texas Instruments', 'BC6A29': 'Texas Instruments', '847E40': 'Texas Instruments', '001735': 'Intel Wireless Network Group', '74AC5F': 'Qiku Internet Network Scientific (Shenzhen) Co., Ltd.', '38CADA': 'Apple, Inc.', 'D0B33F': 'Shenzhen TINNO Mobile Technology Corp.', 'BCD1D3': 'Shenzhen TINNO Mobile Technology Corp.', 'D83C69': 'Shenzhen TINNO Mobile Technology Corp.', 'F4F5D8': 'Google, Inc.', '8C579B': 'Wistron Neweb Corporation', '0059AC': 'KPN. B.V.',
'40D855': 'IEEE Registration Authority', '34AB37': 'Apple, Inc.', '2400BA': 'HUAWEI TECHNOLOGIES CO.,LTD', '24DF6A': 'HUAWEI TECHNOLOGIES CO.,LTD', '788B77': 'Standar Telecom', 'B0C090': 'Chicony Electronics Co., Ltd.', '907F61': 'Chicony Electronics Co., Ltd.', '0C0535': 'Juniper Systems', 'BC83A7': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', 'BCEC23': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', '18AF61': 'Apple, Inc.', '5CF938': 'Apple, Inc.', '009069': 'Juniper Networks', '64649B': 'Juniper Networks', 'F01C2D': 'Juniper Networks', '307C5E': 'Juniper Networks', 'AC06C7': 'ServerNet S.r.l.', 'E83EFC': 'ARRIS Group, Inc.', '900DCB': 'ARRIS Group, Inc.', '001DCD': 'ARRIS Group, Inc.', '001DD2': 'ARRIS Group, Inc.', '4018B1': 'Aerohive Networks Inc.', '8C09F4': 'ARRIS Group, Inc.', '8857EE': 'BUFFALO.INC', '101F74': 'Hewlett Packard', '009C02': 'Hewlett Packard', 'F4CE46': 'Hewlett Packard', 'DCFB02': 'BUFFALO.INC', '001635': 'Hewlett Packard', '0008C7': 'Hewlett Packard', '0010E3': 'Hewlett Packard', '000883': 'Hewlett Packard', 'A02BB8': 'Hewlett Packard', '0019BB': 'Hewlett Packard', '001F29': 'Hewlett Packard', '00215A': 'Hewlett Packard', '00237D': 'Hewlett Packard', 'E8ED05': 'ARRIS Group, Inc.', '789684': 'ARRIS Group, Inc.', 'CC65AD': 'ARRIS Group, Inc.', '002655': 'Hewlett Packard', '000D9D': 'Hewlett Packard', '001560': 'Hewlett Packard', '00207B': 'Intel Corporation', '001175': 'Intel Corporation', '780CB8': 'Intel Corporate', '185E0F': 'Intel Corporate', '2C8158': 'Hon Hai Precision Ind. Co.,Ltd.', '8002DF': 'ORA Inc.', '00306E': 'Hewlett Packard', '3C4A92': 'Hewlett Packard', '7C7D3D': 'HUAWEI TECHNOLOGIES CO.,LTD', '4482E5': 'HUAWEI TECHNOLOGIES CO.,LTD', '00234E': 'Hon Hai Precision Ind. Co.,Ltd.', '2C233A': 'Hewlett Packard', '000A57': 'Hewlett Packard', '0001E7': 'Hewlett Packard', '0001E6': 'Hewlett Packard', '002376': 'HTC Corporation', '38E7D8': 'HTC Corporation', '188796': 'HTC Corporation', 'B4CEF6': 'HTC Corporation', '8CDCD4': 'Hewlett Packard', 'D4C9EF': 'Hewlett Packard', 'FC15B4': 'Hewlett Packard', '3CA82A': 'Hewlett Packard', 'EC5F23': 'Qinghai Kimascend Electronics Technology Co. Ltd.', '047D50': 'Shenzhen Kang Ying Technology Co.Ltd.', '54EFFE': 'Fullpower Technologies, Inc.', '940937': 'HUMAX Co., Ltd.', 'E84DD0': 'HUAWEI TECHNOLOGIES CO.,LTD', '0C45BA': 'HUAWEI TECHNOLOGIES CO.,LTD', '20906F': 'Shenzhen Tencent Computer System Co., Ltd.', '6CE3B6': 'Nera Telecommunications Ltd.', 'DCD321': 'HUMAX Co., Ltd.', '6C72E7': 'Apple, Inc.', '741BB2': 'Apple, Inc.', '6CE873': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'C46E1F': 'TP-LINK TECHNOLOGIES CO.,LTD.', '50FA84': 'TP-LINK TECHNOLOGIES CO.,LTD.', '44B32D': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'CC4463': 'Apple, Inc.', '882593': 'TP-LINK TECHNOLOGIES CO.,LTD.', '001FE1': 'Hon Hai Precision Ind. Co.,Ltd.', 'D85D4C': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'A0F3C1': 'TP-LINK TECHNOLOGIES CO.,LTD.', '001D0F': 'TP-LINK TECHNOLOGIES CO.,LTD.', '0023CD': 'TP-LINK TECHNOLOGIES CO.,LTD.', '90489A': 'Hon Hai Precision Ind. Co.,Ltd.', '0071CC': 'Hon Hai Precision Ind. Co.,Ltd.',
'B05B67': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CCA223': 'HUAWEI TECHNOLOGIES CO.,LTD', '786A89': 'HUAWEI TECHNOLOGIES CO.,LTD', '384608': 'zte corporation', '4CAC0A': 'zte corporation', 'B4B362': 'zte corporation', 'B075D5': 'zte corporation', 'D0154A': 'zte corporation', '0026ED': 'zte corporation', '006057': 'Murata Manufacturing Co., Ltd.', '783E53': 'BSkyB Ltd', '0019FB': 'BSkyB Ltd', 'C4F57C': 'Brocade Communications Systems, Inc.', '14B968': 'HUAWEI TECHNOLOGIES CO.,LTD', '5CF96A': 'HUAWEI TECHNOLOGIES CO.,LTD', '0012F2': 'Brocade Communications Systems, Inc.', '00051E': 'Brocade Communications Systems, Inc.', '083E8E': 'Hon Hai Precision Ind. Co.,Ltd.', '002293': 'zte corporation', '10A5D0': 'Murata Manufacturing Co., Ltd.', '50A72B': 'HUAWEI TECHNOLOGIES CO.,LTD', '0CD6BD': 'HUAWEI TECHNOLOGIES CO.,LTD', '00F81C': 'HUAWEI TECHNOLOGIES CO.,LTD', '087A4C': 'HUAWEI TECHNOLOGIES CO.,LTD', 'ACE215': 'HUAWEI TECHNOLOGIES CO.,LTD', '346BD3': 'HUAWEI TECHNOLOGIES CO.,LTD', '70723C': 'HUAWEI TECHNOLOGIES CO.,LTD', 'ACE87B': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F83DFF': 'HUAWEI TECHNOLOGIES CO.,LTD', '285FDB': 'HUAWEI TECHNOLOGIES CO.,LTD', '404D8E': 'HUAWEI TECHNOLOGIES CO.,LTD', '4C5499': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F49FF3': 'HUAWEI TECHNOLOGIES CO.,LTD', '240995': 'HUAWEI TECHNOLOGIES CO.,LTD', '84DBAC': 'HUAWEI TECHNOLOGIES CO.,LTD', '94772B': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D440F0': 'HUAWEI TECHNOLOGIES CO.,LTD', '04021F': 'HUAWEI TECHNOLOGIES CO.,LTD', '10CDAE': 'Avaya Inc', '048A15': 'Avaya Inc', 'B4B017': 'Avaya Inc', '90FB5B': 'Avaya Inc', 'C8F406': 'Avaya Inc', '7052C5': 'Avaya Inc', 'F81547': 'Avaya Inc', '506184': 'Avaya Inc', '185936': 'Xiaomi Communications Co Ltd', '20A783': 'miControl GmbH', '200BC7': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F84ABF': 'HUAWEI TECHNOLOGIES CO.,LTD', '78D752': 'HUAWEI TECHNOLOGIES CO.,LTD', '104780': 'HUAWEI TECHNOLOGIES CO.,LTD', '548998': 'HUAWEI TECHNOLOGIES CO.,LTD', '00040D': 'Avaya Inc', '70A8E3': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F8E811': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F8A45F': 'Xiaomi Communications Co Ltd', '640980': 'Xiaomi Communications Co Ltd', '94049C': 'HUAWEI TECHNOLOGIES CO.,LTD', '688F84': 'HUAWEI TECHNOLOGIES CO.,LTD', '30D17E': 'HUAWEI TECHNOLOGIES CO.,LTD', '0050BD': 'Cisco Systems, Inc', '00906F': 'Cisco Systems, Inc', '74D6EA': 'Texas Instruments', '209148': 'Texas Instruments', '544A16': 'Texas Instruments', 'E02F6D': 'Cisco Systems, Inc', '58971E': 'Cisco Systems, Inc', 'B4E9B0': 'Cisco Systems, Inc', '000832': 'Cisco Systems, Inc', '189C5D': 'Cisco Systems, Inc', '5CA48A': 'Cisco Systems, Inc', '1C1D86': 'Cisco Systems, Inc', '60735C': 'Cisco Systems, Inc', '34A84E': 'Cisco Systems, Inc', '54781A': 'Cisco Systems, Inc', '00605C': 'Cisco Systems, Inc', '0006C1': 'Cisco Systems, Inc', '00E014': 'Cisco Systems, Inc', 'DCA5F4': 'Cisco Systems, Inc', '5017FF': 'Cisco Systems, Inc', '70105C': 'Cisco Systems, Inc', '10F311': 'Cisco Systems, Inc', '0050F0': 'Cisco Systems, Inc', '005014': 'Cisco Systems, Inc', '0090F2': 'Cisco Systems, Inc', '0CE0E4': 'PLANTRONICS, INC.', '74A2E6': 'Cisco Systems, Inc', 'BCF1F2': 'Cisco Systems, Inc', 'C80084': 'Cisco Systems, Inc', '40A6E8': 'Cisco Systems, Inc', '3085A9': 'ASUSTek COMPUTER INC.', 'B83861': 'Cisco Systems, Inc', '580A20': 'Cisco Systems, Inc', '2C3ECF': 'Cisco Systems, Inc', 'B05947': 'Shenzhen Qihu Intelligent Technology Company Limited', '346288': 'Cisco Systems, Inc', 'CCD8C1': 'Cisco Systems, Inc', '7C0ECE': 'Cisco Systems, Inc', 'A0ECF9': 'Cisco Systems, Inc', '508789': 'Cisco Systems, Inc',
'381C1A': 'Cisco Systems, Inc', 'BC671C': 'Cisco Systems, Inc', '001947': 'Cisco SPVTG', '001839': 'Cisco-Linksys, LLC', '002215': 'ASUSTek COMPUTER INC.', 'E0CB4E': 'ASUSTek COMPUTER INC.', '547C69': 'Cisco Systems, Inc', '001731': 'ASUSTek COMPUTER INC.', 'DCCEC1': 'Cisco Systems, Inc', '9C57AD': 'Cisco Systems, Inc', '60FEC5': 'Apple, Inc.', 'E425E7': 'Apple, Inc.', 'BC926B': 'Apple, Inc.', '101C0C': 'Apple, Inc.', '080007': 'Apple, Inc.', '004096': 'Cisco Systems, Inc', '30F70D': 'Cisco Systems, Inc', 'E86549': 'Cisco Systems, Inc', 'B07D47': 'Cisco Systems, Inc', '38ED18': 'Cisco Systems, Inc', '382056': 'Cisco Systems, Inc', '40D32D': 'Apple, Inc.', 'C42C03': 'Apple, Inc.', '9027E4': 'Apple, Inc.', '109ADD': 'Apple, Inc.', '581FAA': 'Apple, Inc.', '88C663': 'Apple, Inc.', '001F5B': 'Apple, Inc.', '002436': 'Apple, Inc.', '00254B': 'Apple, Inc.', '0016CB': 'Apple, Inc.', '0017F2': 'Apple, Inc.', '7C6D62': 'Apple, Inc.', '20C9D0': 'Apple, Inc.', '68967B': 'Apple, Inc.', '84FCFE': 'Apple, Inc.', 'E48B7F': 'Apple, Inc.', '008865': 'Apple, Inc.', 'BC3BAF': 'Apple, Inc.', '3CE072': 'Apple, Inc.', '38484C': 'Apple, Inc.', 'A46706': 'Apple, Inc.', '8C5877': 'Apple, Inc.', '7CF05F': 'Apple, Inc.', '804971': 'Apple, Inc.', '6C3E6D': 'Apple, Inc.', 'BC6778': 'Apple, Inc.', 'D8D1CB': 'Apple, Inc.', 'A8FAD8': 'Apple, Inc.', 'B817C2': 'Apple, Inc.', '7C11BE': 'Apple, Inc.', '283737': 'Apple, Inc.', '50EAD6': 'Apple, Inc.', '98D6BB': 'Apple, Inc.', '189EFC': 'Apple, Inc.', 'ACCF5C': 'Apple, Inc.', '80006E': 'Apple, Inc.', '848E0C': 'Apple, Inc.', '3C15C2': 'Apple, Inc.', '6C709F': 'Apple, Inc.', 'C0F2FB': 'Apple, Inc.', '24E314': 'Apple, Inc.', '80E650': 'Apple, Inc.', '90FD61': 'Apple, Inc.', '087045': 'Apple, Inc.', 'A88808': 'Apple, Inc.', 'A4C361': 'Apple, Inc.', '2CF0EE': 'Apple, Inc.', '5C97F3': 'Apple, Inc.', 'D4F46F': 'Apple, Inc.', '6476BA': 'Apple, Inc.', '34E2FD': 'Apple, Inc.', '04489A': 'Apple, Inc.', 'F0F61C': 'Apple, Inc.', '8C2937': 'Apple, Inc.', 'B09FBA': 'Apple, Inc.', '0C4DE9': 'Apple, Inc.', 'E0F5C6': 'Apple, Inc.', 'A0EDCD': 'Apple, Inc.', 'F0F249': 'Hitron Technologies. Inc',
'2857BE': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '5CF5DA': 'Apple, Inc.', '18EE69': 'Apple, Inc.', '649ABE': 'Apple, Inc.', 'F099BF': 'Apple, Inc.', '94E96A': 'Apple, Inc.', 'AC293A': 'Apple, Inc.', '9CFC01': 'Apple, Inc.', '9C35EB': 'Apple, Inc.', '48437C': 'Apple, Inc.', '34A395': 'Apple, Inc.', '787E61': 'Apple, Inc.', '60F81D': 'Apple, Inc.', '38C986': 'Apple, Inc.', 'D03311': 'Apple, Inc.', '507A55': 'Apple, Inc.', 'C8C2C6': 'Shanghai Airm2m Communication Technology Co., Ltd', '789C85': 'August Home, Inc.', '74D7CA': 'Panasonic Corporation Automotive', '5882A8': 'Microsoft', '58685D': 'Tempo Australia Pty Ltd', '544B8C': 'Juniper Networks', 'DCFE07': 'PEGATRON CORPORATION', '707938': 'Wuxi Zhanrui Electronic Technology Co.,LTD', '243184': 'SHARP Corporation', '582BDB': 'Pax AB', 'E03676': 'HUAWEI TECHNOLOGIES CO.,LTD', 'EC388F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D03E5C': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C49E41': 'G24 Power Limited', 'B813E9': 'Trace Live Network', '80B709': 'Viptela, Inc', 'F00D5C': 'JinQianMao Technology Co.,Ltd.', '54BE53': 'zte corporation', '280E8B': 'Beijing Spirit Technology Development Co., Ltd.', 'F44D30': 'Elitegroup Computer Systems Co.,Ltd.', '0C8610': 'Juniper Networks', '38D40B': 'Samsung Electronics Co.,Ltd', 'E83A12': 'Samsung Electronics Co.,Ltd', '24DA9B': 'Motorola Mobility LLC, a Lenovo Company', '30E090': 'Linctronix Ltd,', 'A4DCBE': 'HUAWEI TECHNOLOGIES CO.,LTD', 'ECB870': 'Beijing Heweinet Technology Co.,Ltd.', '94BBAE': 'Husqvarna AB', 'D40AA9': 'ARRIS Group, Inc.', '203D66': 'ARRIS Group, Inc.', 'D494E8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B078F0': 'Beijing HuaqinWorld Technology Co.,Ltd.', '209BCD': 'Apple, Inc.', '3095E3': 'SHANGHAI SIMCOM LIMITED', '80656D': 'Samsung Electronics Co.,Ltd', 'FCF136': 'Samsung Electronics Co.,Ltd', 'B88687': 'Liteon Technology Corporation', '18895B': 'Samsung Electronics Co.,Ltd', '584925': 'E3 Enterprise', '94F278': 'Elma Electronic', '283713': 'Shenzhen 3Nod Digital Technology Co., Ltd.', '0894EF': 'Wistron Infocomm (Zhongshan) Corporation', 'E0319E': 'Valve Corporation', '3C5CC3': 'Shenzhen First Blue Chip Technology Ltd', 'C49FF3': 'Mciao Technologies, Inc.', '788E33': 'Jiangsu SEUIC Technology Co.,Ltd', 'ECEED8': 'ZTLX Network Technology Co.,Ltd', '80EB77': 'Wistron Corporation', '483974': 'Proware Technologies Co., Ltd.', '30FFF6': 'HangZhou KuoHeng Technology Co.,ltd', 'D8EFCD': 'Nokia', '4CC681': 'Shenzhen Aisat Electronic Co., Ltd.', '48E244': 'Hon Hai Precision Ind. Co.,Ltd.',
'7CAB25': 'MESMO TECHNOLOGY INC.', 'B0411D': 'ITTIM Technologies', 'F8BF09': 'HUAWEI TECHNOLOGIES CO.,LTD', '7CB25C': 'Acacia Communications', 'DCDB70': 'Tonfunk Systementwicklung und Service GmbH', '800B51': 'Chengdu XGimi Technology Co.,Ltd', 'F80D60': 'CANON INC.', 'F0182B': 'LG Chem', '3481F4': 'SST Taiwan Ltd.', '7CA237': 'King Slide Technology CO., LTD.', 'D404CD': 'ARRIS Group, Inc.', '584822': 'Sony Mobile Communications Inc', '747336': 'MICRODIGTAL Inc', '382B78': 'ECO PLUGS ENTERPRISE CO., LTD', 'A47B2C': 'Nokia', '24E5AA': 'Philips Oral Healthcare, Inc.', '78BDBC': 'Samsung Electronics Co.,Ltd', '349B5B': 'Maquet GmbH', '884157': 'Shenzhen Atsmart Technology Co.,Ltd.', 'D89A34': 'Beijing SHENQI Technology Co., Ltd.', 'A0A65C': 'Supercomputing Systems AG', '485073': 'Microsoft Corporation', 'E8377A': 'Zyxel Communications Corporation', '803B2A': 'ABB Xiamen Low Voltage Equipment Co.,Ltd.', 'C4EA1D': 'Technicolor', '7CF90E': 'Samsung Electronics Co.,Ltd', '50F0D3': 'Samsung Electronics Co.,Ltd', '00A784': 'ITX security', '84119E': 'Samsung Electronics Co.,Ltd', '149A10': 'Microsoft Corporation', '38FACA': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '5CB43E': 'HUAWEI TECHNOLOGIES CO.,LTD', '707781': 'Hon Hai Precision Ind. Co.,Ltd.', '54E140': 'INGENICO', 'E4907E': 'Motorola Mobility LLC, a Lenovo Company', '746A3A': 'Aperi Corporation', '94A7B7': 'zte corporation', '1844E6': 'zte corporation', '3CCE15': 'Mercedes-Benz USA, LLC', '287610': 'IgniteNet', '20D75A': 'Posh Mobile Limited', 'F41563': 'F5 Networks, Inc.', '8C8B83': 'Texas Instruments', '4011DC': 'Sonance', '1C8341': 'Hefei Bitland Information Technology Co.Ltd', '081FEB': 'BinCube', '785F4C': 'Argox Information Co., Ltd.', '34CC28': 'Nexpring Co. LTD.,', '54E2C8': 'Dongguan Aoyuan Electronics Technology Co., Ltd', '6C1E70': 'Guangzhou YBDS IT Co.,Ltd', '54B80A': 'D-Link International', 'D8ADDD': 'Sonavation, Inc.', '8833BE': 'Ivenix, Inc.', 'E48D8C': 'Routerboard.com', '706879': 'Saijo Denki International Co., Ltd.', '10AF78': 'Shenzhen ATUE Technology Co., Ltd', 'CC19A8': 'PT Inovação e Sistemas SA', 'B4B265': 'DAEHO I&T', 'E03560': 'Challenger Supply Holdings, LLC', '3CCB7C': 'TCT mobile ltd', '249EAB': 'HUAWEI TECHNOLOGIES CO.,LTD', '244B03': 'Samsung Electronics Co.,Ltd', 'E4CE70': 'Health & Life co., Ltd.', '7C11CD': 'QianTang Technology', 'CCA4AF': 'Shenzhen Sowell Technology Co., LTD', '102C83': 'XIMEA', '6CA75F': 'zte corporation', '8C7967': 'zte corporation', '7858F3': 'Vachen Co.,Ltd', '709C8F': 'Nero AG', '007E56': 'China Dragon Technology Limited', '74E28C': 'Microsoft Corporation', '0071C2': 'PEGATRON CORPORATION', '7C8274': 'Shenzhen Hikeen Technology CO.,LTD', '94D417': 'GPI KOREA INC.', '244B81': 'Samsung Electronics Co.,Ltd', '704E66': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', 'D855A3': 'zte corporation', '38D82F': 'zte corporation', 'F07959': 'ASUSTek COMPUTER INC.', 'E08E3C': 'Aztech Electronics Pte Ltd', '844BB7': 'Beijing Sankuai Online Technology Co.,Ltd', '68F0BC': 'Shenzhen LiWiFi Technology Co., Ltd', '300EE3': 'Aquantia Corporation', '18F145': 'NetComm Wireless Limited', 'ACABBF': 'AthenTek Inc.', '2884FA': 'SHARP Corporation', '60AF6D': 'Samsung Electronics Co.,Ltd', 'B85A73': 'Samsung Electronics Co.,Ltd', '3C1E04': 'D-Link International', '60D9A0': 'Lenovo Mobile Communication Technology Ltd.', '68B983': 'b-plus GmbH', '78B3B9': 'ShangHai sunup lighting CO.,LTD', '04C09C': 'Tellabs Inc.', '981DFA': 'Samsung Electronics Co.,Ltd', '186882': 'Beward R&D Co., Ltd.', 'EC8009': 'NovaSparks',
'50ADD5': 'Dynalec Corporation', 'B04519': 'TCT mobile ltd', 'D88D5C': 'Elentec', '7429AF': 'Hon Hai Precision Ind. Co.,Ltd.', '3C1A0F': 'ClearSky Data', 'E8CC18': 'D-Link International', 'B09137': 'ISis ImageStream Internet Solutions, Inc', '8C0551': 'Koubachi AG', 'D897BA': 'PEGATRON CORPORATION', 'A8D88A': 'Wyconn', '40EACE': 'FOUNDER BROADBAND NETWORK SERVICE CO.,LTD', '848EDF': 'Sony Mobile Communications Inc', 'A49D49': 'Ketra, Inc.', 'C03896': 'Hon Hai Precision Ind. Co.,Ltd.', '2C5089': 'Shenzhen Kaixuan Visual Technology Co.,Limited', '948E89': 'INDUSTRIAS UNIDAS SA DE CV', '00AEFA': 'Murata Manufacturing Co., Ltd.', '841826': 'Osram GmbH', 'F8E903': 'D-Link International', 'E89606': 'testo Instruments (Shenzhen) Co., Ltd.', '1C7E51': '3bumen.com', '6872DC': 'CETORY.TV Company Limited', '3077CB': 'Maike Industry(Shenzhen)CO.,LTD', '2497ED': 'Techvision Intelligent Technology Limited', '909F33': 'EFM Networks', '600417': 'POSBANK CO.,LTD', '207693': 'Lenovo (Beijing) Limited.', '084656': 'VEO-LABS', 'EC3C5A': 'SHEN ZHEN HENG SHENG HUI DIGITAL TECHNOLOGY CO.,LTD', '4488CB': 'Camco Technologies NV', '6CBFB5': 'Noon Technology Co., Ltd', '50294D': 'NANJING IOT SENSOR TECHNOLOGY CO,LTD', '0CCFD1': 'SPRINGWAVE Co., Ltd', '74BADB': 'Longconn Electornics(shenzhen)Co.,Ltd', 'B8F317': 'iSun Smasher Communications Private Limited', '8CF813': 'ORANGE POLSKA', '549F35': 'Dell Inc.', '2442BC': 'Alinco,incorporated', 'F82441': 'Yeelink', '108A1B': 'RAONIX Inc.', '102F6B': 'Microsoft Corporation', '8CB094': 'Airtech I&C Co., Ltd', '945493': 'Rigado, LLC', '68F06D': 'ALONG INDUSTRIAL CO., LIMITED', 'F42853': 'Zioncom Electronics (Shenzhen) Ltd.', 'D4EC86': 'LinkedHope Intelligent Technologies Co., Ltd', '1C9C26': 'Zoovel Technologies', '046785': 'scemtec Hard- und Software fuer Mess- und Steuerungstechnik GmbH', 'D0FA1D': 'Qihoo 360 Technology Co.,Ltd', 'AC11D3': 'Suzhou HOTEK Video Technology Co. Ltd', '8432EA': 'ANHUI WANZTEN P&T CO., LTD', 'E01D38': 'Beijing HuaqinWorld Technology Co.,Ltd', 'E47FB2': 'FUJITSU LIMITED', 'FC6DC0': 'BME CORPORATION', '24D13F': 'MEXUS CO.,LTD', '7824AF': 'ASUSTek COMPUTER INC.', 'FC9FE1': 'CONWIN.Tech. Ltd', 'B89BE4': 'ABB Power Systems Power Generation', 'A81B5D': 'Foxtel Management Pty Ltd', '505065': 'TAKT Corporation', '40C62A': 'Shanghai Jing Ren Electronic Technology Co., Ltd.', 'E8150E': 'Nokia Corporation', 'C44202': 'Samsung Electronics Co.,Ltd', 'B4AE6F': 'Circle Reliance, Inc DBA Cranberry Networks', '90DA6A': 'FOCUS H&S Co., Ltd.', 'DC537C': 'Compal Broadband Networks, Inc.', '44A6E5': 'THINKING TECHNOLOGY CO.,LTD', 'A45DA1': 'ADB Broadband Italia', '0CAC05': 'Unitend Technologies Inc.', '4C6E6E': 'Comnect Technology CO.,LTD', '8C3357': 'HiteVision Digital Media Technology Co.,Ltd.', '3CAA3F': 'iKey, Ltd.', '0C383E': 'Fanvil Technology Co., Ltd.', '60CDA9': 'Abloomy', 'B8AD3E': 'BLUECOM', '183009': 'Woojin Industrial Systems Co., Ltd.', '74DBD1': 'Ebay Inc', '30B5F1': 'Aitexin Technology Co., Ltd', 'B01041': 'Hon Hai Precision Ind. Co.,Ltd.',
'80AD67': 'Kasda Networks Inc', '18D5B6': 'SMG Holdings LLC', '5C2E59': 'Samsung Electronics Co.,Ltd', 'A8E539': 'Moimstone Co.,Ltd', '54B753': 'Hunan Fenghui Yinjia Science And Technology Co.,Ltd', '103047': 'Samsung Electronics Co.,Ltd', 'F884F2': 'Samsung Electronics Co.,Ltd', 'B0754D': 'Nokia', 'E0CBEE': 'Samsung Electronics Co.,Ltd', '4C3909': 'HPL Electric & Power Private Limited', '907EBA': 'UTEK TECHNOLOGY (SHENZHEN) CO.,LTD', 'A002DC': 'Amazon Technologies Inc.', '542AA2': 'Alpha Networks Inc.', '84948C': 'Hitron Technologies. Inc', 'A8F7E0': 'PLANET Technology Corporation', '4486C1': 'Siemens Low Voltage & Products', '4045DA': 'Spreadtrum Communications (Shanghai) Co., Ltd.', '3451AA': 'JID GLOBAL', '98BE94': 'IBM', '6C198F': 'D-Link International', 'C8FF77': 'Dyson Limited', 'B49EAC': "Imagik Int'l Corp", 'CC07E4': 'Lenovo Mobile Communication Technology Ltd.', 'C46BB4': 'myIDkey', '0C63FC': 'Nanjing Signway Technology Co., Ltd', 'D4E08E': 'ValueHD Corporation', 'C89F1D': 'SHENZHEN COMMUNICATION TECHNOLOGIES CO.,LTD', '143DF2': 'Beijing Shidai Hongyuan Network Communication Co.,Ltd', '2C39C1': 'Ciena Corporation', '54EE75': 'Wistron InfoComm(Kunshan)Co.,Ltd.', '885BDD': 'Aerohive Networks Inc.', '0874F6': 'Winterhalter Gastronom GmbH', 'D8492F': 'CANON INC.', '800E24': 'ForgetBox', '3C25D7': 'Nokia Corporation', '30A8DB': 'Sony Mobile Communications Inc', '18FF2E': 'Shenzhen Rui Ying Da Technology Co., Ltd', '847207': 'I&C Technology', 'CCA614': 'AIFA TECHNOLOGY CORP.', '90F1B0': 'Hangzhou Anheng Info&Tech CO.,LTD', '4C8B30': 'Actiontec Electronics, Inc', '0805CD': 'DongGuang EnMai Electronic Product Co.Ltd.', '54D163': 'MAX-TECH,INC', '48B5A7': 'Glory Horse Industries Ltd.', '0C4F5A': 'ASA-RT s.r.l.', 'D4224E': 'Alcatel Lucent', '9C86DA': 'Phoenix Geophysics Ltd.', '2C073C': 'DEVLINE LIMITED', '7C1A03': '8Locations Co., Ltd.', 'ACB859': 'Uniband Electronic Corp,', '2C9AA4': 'Eolo SpA', '88B1E1': ' Mojo Networks, Inc.', '90DB46': 'E-LEAD ELECTRONIC CO., LTD', '344F5C': 'R&M AG', '6047D4': 'FORICS Electronic Technology Co., Ltd.', 'FCF8B7': 'TRONTEQ Electronic', '30F42F': 'ESP', '704E01': 'KWANGWON TECH CO., LTD.', '746A8F': 'VS Vision Systems GmbH', '54A31B': 'Shenzhen Linkworld Technology Co,.LTD', 'CC398C': 'Shiningtek', '1820A6': 'Sage Co., Ltd.', '20EAC7': 'SHENZHEN RIOPINE ELECTRONICS CO., LTD', '64B370': 'PowerComm Solutions LLC', '5CF50D': 'Institute of microelectronic applications', '749C52': 'Huizhou Desay SV Automotive Co., Ltd.', 'C4291D': 'KLEMSAN ELEKTRIK ELEKTRONIK SAN.VE TIC.AS.', '6C5F1C': 'Lenovo Mobile Communication Technology Ltd.', '7CE4AA': 'Private', '083F3E': 'WSH GmbH', '2C957F': 'zte corporation', '3C0C48': 'Servergy, Inc.', '10DEE4': 'automationNEXT GmbH', 'F03A4B': 'Bloombase, Inc.', 'A0E453': 'Sony Mobile Communications Inc', '404A18': 'Addrek Smart Solutions', 'C0C569': 'SHANGHAI LYNUC CNC TECHNOLOGY CO.,LTD', 'C4C0AE': 'MIDORI ELECTRONIC CO., LTD.', 'ACC595': 'Graphite Systems', '7CE1FF': 'Computer Performance, Inc. DBA Digital Loggers, Inc.',
'D8150D': 'TP-LINK TECHNOLOGIES CO.,LTD.', '5850AB': 'TLS Corporation', '7CCD11': 'MS-Magnet', '98FF6A': 'OTEC(Shanghai)Technology Co.,Ltd.', 'BC1A67': 'YF Technology Co., Ltd', '4CD7B6': 'Helmer Scientific', '8425A4': 'Tariox Limited', '483D32': 'Syscor Controls & Automation', 'CC856C': 'SHENZHEN MDK DIGITAL TECHNOLOGY CO.,LTD', 'AC6BAC': 'Jenny Science AG', 'D8EE78': 'Moog Protokraft', '241148': 'Entropix, LLC', 'C445EC': 'Shanghai Yali Electron Co.,LTD', 'E0E631': 'SNB TECHNOLOGIES LIMITED', '78B5D2': 'Ever Treasure Industrial Limited', 'F8572E': 'Core Brands, LLC', '50ED78': 'Changzhou Yongse Infotech Co.,Ltd', '90028A': 'Shenzhen Shidean Legrand Electronic Products Co.,Ltd', '1CC11A': 'Wavetronix', 'FC09D8': 'ACTEON Group', '743ECB': 'Gentrice tech', '7C444C': 'Entertainment Solutions, S.L.', '0444A1': 'TELECON GALICIA,S.A.', '20C60D': 'Shanghai annijie Information technology Co.,LTD', '38CA97': 'Contour Design LLC', 'BC2D98': 'ThinGlobal LLC', '1879A2': 'GMJ ELECTRIC LIMITED', 'E0C86A': 'SHENZHEN TW-SCIE Co., Ltd', 'BCEE7B': 'ASUSTek COMPUTER INC.', '3413A8': 'Mediplan Limited', '7C9763': 'Openmatics s.r.o.', '48A2B7': 'Kodofon JSC', 'CC7498': 'Filmetrics Inc.', '085AE0': 'Recovision Technology Co., Ltd.', '20E791': 'Siemens Healthcare Diagnostics, Inc', '089758': 'Shenzhen Strong Rising Electronics Co.,Ltd DongGuan Subsidiary', 'FC19D0': 'Cloud Vision Networks Technology Co.,Ltd.', '9486D4': 'Surveillance Pro Corporation', '9CD643': 'D-Link International', '3C18A0': 'Luxshare Precision Industry Company Limited', '8CAE89': 'Y-cam Solutions Ltd', '94E98C': 'Nokia', 'FCE1D9': 'Stable Imaging Solutions LLC', '50206B': 'Emerson Climate Technologies Transportation Solutions', '7CBF88': 'Mobilicom LTD', '60DB2A': 'HNS', 'B04545': 'YACOUB Automation GmbH', 'C8EE75': 'Pishion International Co. Ltd', 'CC3429': 'TP-LINK TECHNOLOGIES CO.,LTD.', '64BABD': 'SDJ Technologies, Inc.', '24C848': 'mywerk Portal GmbH', 'CCFB65': 'Nintendo Co., Ltd.', 'A0A23C': 'GPMS', '68FCB3': 'Next Level Security Systems, Inc.', '94C3E4': 'Atlas Copco IAS GmbH', '34885D': 'Logitech Far East', '88576D': 'XTA Electronics Ltd', 'BC4100': 'CODACO ELECTRONIC s.r.o.', 'FCD817': 'Beijing Hesun Technologies Co.Ltd.', '682DDC': 'Wuhan Changjiang Electro-Communication Equipment CO.,LTD', 'E8611F': 'Dawning Information Industry Co.,Ltd', '2847AA': 'Nokia Corporation', '5CD61F': 'Qardio, Inc', '705957': 'Medallion Instrumentation Systems', '9C443D': 'CHENGDU XUGUANG TECHNOLOGY CO, LTD', 'B424E7': 'Codetek Technology Co.,Ltd', '542F89': 'Euclid Laboratories, Inc.', '909916': 'ELVEES NeoTek OJSC', '00A2FF': 'abatec group AG', '6024C1': 'Jiangsu Zhongxun Electronic Technology Co., Ltd', 'A0143D': 'PARROT SA', 'FC1BFF': 'V-ZUG AG', 'F42896': 'SPECTO PAINEIS ELETRONICOS LTDA', '78CB33': 'DHC Software Co.,Ltd', '60A9B0': 'Merchandising Technologies, Inc', '5027C7': 'TECHNART Co.,Ltd', '6C5AB5': 'TCL Technoly Electronics (Huizhou) Co., Ltd.', '385AA8': 'Beijing Zhongdun Security Technology Development Co.', 'F4A294': 'EAGLE WORLD DEVELOPMENT CO., LIMITED', 'EC3E09': 'PERFORMANCE DESIGNED PRODUCTS, LLC', '947C3E': 'Polewall Norge AS', '34A3BF': 'Terewave. Inc.',
'8C088B': 'Remote Solution', 'B43E3B': 'Viableware, Inc', '0C5CD8': 'DOLI Elektronik GmbH', '3C15EA': 'TESCOM CO., LTD.', 'E80410': 'Private', 'F4BD7C': 'Chengdu jinshi communication Co., LTD', 'DCC422': 'Systembase Limited', 'C8F36B': 'Yamato Scale Co.,Ltd.', '98F8C1': 'IDT Technology Limited', '6CD1B0': 'WING SING ELECTRONICS HONG KONG LIMITED', 'A4F522': 'CHOFU SEISAKUSHO CO.,LTD', '845C93': 'Chabrier Services', '68E166': 'Private', 'BC2BD7': 'Revogi Innovation Co., Ltd.', '286D97': 'SAMJIN Co., Ltd.', '24ECD6': 'CSG Science & Technology Co.,Ltd.Hefei', 'CC2A80': 'Micro-Biz intelligence solutions Co.,Ltd', '60FEF9': 'Thomas & Betts', 'B8DC87': 'IAI Corporation', '7C6FF8': 'ShenZhen ACTO Digital Video Technology Co.,Ltd.', 'DCF755': 'SITRONIK', '5C026A': 'Applied Vision Corporation', '0C9301': 'PT. Prasimax Inovasi Teknologi', '746630': 'T:mi Ytti', '6CB350': 'Anhui comhigher tech co.,ltd', '3859F8': 'MindMade Sp. z o.o.', '4CDF3D': 'TEAM ENGINEERS ADVANCE TECHNOLOGIES INDIA PVT LTD', 'E89218': 'Arcontia International AB', '0075E1': 'Ampt, LLC', 'D46A91': 'Snap AV', '98CDB4': 'Virident Systems, Inc.', 'A42305': 'Open Networking Laboratory', '1C48F9': 'GN Netcom A/S', 'B0FEBD': 'Private', '60699B': 'isepos GmbH', 'D4D50D': 'Southwest Microwave, Inc', '34CD6D': 'CommSky Technologies', 'E4F3E3': 'Shanghai iComhome Co.,Ltd.', '9046B7': 'Vadaro Pte Ltd', '04CF25': 'MANYCOLORS, INC.', '80BBEB': 'Satmap Systems Ltd', '00B78D': 'Nanjing Shining Electric Automation Co., Ltd', '60FE1E': 'China Palms Telecom.Ltd', 'B050BC': 'SHENZHEN BASICOM ELECTRONIC CO.,LTD.', '841E26': 'KERNEL-I Co.,LTD', 'B4346C': 'MATSUNICHI DIGITAL TECHNOLOGY (HONG KONG) LIMITED', '0086A0': 'Private', 'A05B21': 'ENVINET GmbH', '50B8A2': 'ImTech Technologies LLC,', 'B04C05': 'Fresenius Medical Care Deutschland GmbH', 'B0793C': 'Revolv Inc', '9C4EBF': 'BoxCast', '34A843': 'KYOCERA Display Corporation', '74CA25': 'Calxeda, Inc.', '5CA3EB': 'Lokel s.r.o.', 'C8B373': 'Cisco-Linksys, LLC', '0C2AE7': 'Beijing General Research Institute of Mining and Metallurgy', '983071': 'DAIKYUNG VASCOM', 'D49524': 'Clover Network, Inc.', '945047': 'Rechnerbetriebsgruppe', 'E031D0': 'SZ Telstar CO., LTD', '54112F': 'Sulzer Pump Solutions Finland Oy', '4C55B8': 'Turkcell Teknoloji', '088039': 'Cisco SPVTG', 'E438F2': 'Advantage Controls', 'C4C755': 'Beijing HuaqinWorld Technology Co.,Ltd', '0C2D89': 'QiiQ Communications Inc.', 'A8D236': 'Lightware Visual Engineering', '981094': 'Shenzhen Vsun communication technology Co.,ltd', 'A4F3C1': 'Open Source Robotics Foundation, Inc.', '141330': 'Anakreon UK LLP', '0CF405': 'Beijing Signalway Technologies Co.,Ltd', '5061D6': 'Indu-Sol GmbH', 'DC7014': 'Private', '788DF7': 'Hitron Technologies. Inc',
'2C245F': 'Babolat VS', '905692': 'Autotalks Ltd.', '04BFA8': 'ISB Corporation', '8CC7D0': 'zhejiang ebang communication co.,ltd', 'B8AE6E': 'Nintendo Co., Ltd.', 'D0EB03': 'Zhehua technology limited', '683EEC': 'ERECA', 'C42628': 'Airo Wireless', '30AABD': 'Shanghai Reallytek Information Technology Co.,Ltd', 'A4B818': 'PENTA Gesellschaft für elektronische Industriedatenverarbeitung mbH', 'C04DF7': 'SERELEC', '0C8484': 'Zenovia Electronics Inc.', '005907': 'LenovoEMC Products USA, LLC', '50A715': 'Aboundi, Inc.', '0C0400': 'Jantar d.o.o.', '687CD5': 'Y Soft Corporation, a.s.', '907AF1': 'Wally', '2CB693': 'Radware', 'A861AA': 'Cloudview Limited', 'FC1186': 'Logic3 plc', 'E01877': 'FUJITSU LIMITED', 'E457A8': 'Stuart Manufacturing, Inc.', '789966': 'Musilab Electronics (DongGuan)Co.,Ltd.', '28CBEB': 'One', '7CA15D': 'GN ReSound A/S', '3C081E': 'Beijing Yupont Electric Power Technology Co.,Ltd', 'FC58FA': 'Shen Zhen Shi Xin Zhong Xin Technology Co.,Ltd.', '4CCC34': 'Motorola Solutions Inc.', 'D0D471': 'MVTECH co., Ltd', '0868D0': 'Japan System Design', 'D4223F': 'Lenovo Mobile Communication Technology Ltd.', 'C8EEA6': 'Shenzhen SHX Technology Co., Ltd', '2481AA': 'KSH International Co., Ltd.', 'AC4122': 'Eclipse Electronic Systems Inc.', '6897E8': 'Society of Motion Picture & Television Engineers', 'E8E875': 'iS5 Communications Inc.', 'C80E95': 'OmniLync Inc.', '30055C': 'Brother industries, LTD.', '080EA8': 'Velex s.r.l.', 'B8C46F': 'PRIMMCON INDUSTRIES INC', 'D8B02E': 'Guangzhou Zonerich Business Machine Co., LTD.', 'C4E032': 'IEEE 1904.1 Working Group', '58EB14': 'Proteus Digital Health', 'C458C2': 'Shenzhen TATFOOK Technology Co., Ltd.', 'D0CDE1': 'Scientech Electronics', '5CE0CA': 'FeiTian United (Beijing) System Technology Co., Ltd.', 'E08177': 'GreenBytes, Inc.', '9C9811': 'Guangzhou Sunrise Electronics Development Co., Ltd', 'B86091': 'Onnet Technologies and Innovations LLC', '8C76C1': 'Goden Tech Limited', '8C078C': 'FLOW DATA INC', 'F8DFA8': 'zte corporation', 'A895B0': 'Aker Subsea Ltd', '104D77': 'Innovative Computer Engineering', 'C45DD8': 'HDMI Forum', 'C4EBE3': 'RRCN SAS', '94756E': 'QinetiQ North America', '4C1A95': 'Novakon Co., Ltd.', '60BB0C': 'Beijing HuaqinWorld Technology Co,Ltd', 'A42C08': 'Masterwork Automodules', '10B9FE': 'Lika srl', '301518': 'Ubiquitous Communication Co. ltd.', '841715': 'GP Electronics (HK) Ltd.', '848E96': 'Embertec Pty Ltd', '6499A0': 'AG Elektronik AB', '08F1B7': 'Towerstream Corpration', 'C044E3': 'Shenzhen Sinkna Electronics Co., LTD', '18550F': 'Cisco SPVTG', '187A93': 'AMICCOM Electronics Corporation', '8887DD': 'DarbeeVision Inc.', '30C82A': 'WI-BIZ srl', '88A3CC': 'Amatis Controls', 'C0A0C7': 'FAIRFIELD INDUSTRIES', 'DCA989': 'MACANDC', 'A00363': 'Robert Bosch Healthcare GmbH', 'D0B498': 'Robert Bosch LLC Automotive Electronics', 'E05597': 'Emergent Vision Technologies Inc.', '7C438F': 'E-Band Communications Corp.', 'A0E25A': 'Amicus SK, s.r.o.', 'D40FB2': 'Applied Micro Electronics AME bv', '449B78': 'The Now Factory', 'F0F669': 'Motion Analysis Corporation', '78995C': 'Nationz Technologies Inc', '849DC5': 'Centera Photonics Inc.', '580943': 'Private', 'ECFC55': 'A. Eberle GmbH & Co. KG',
'182A7B': 'Nintendo Co., Ltd.', '68FB95': 'Generalplus Technology Inc.', 'F8F082': 'NAG LLC', '5C89D4': 'Beijing Banner Electric Co.,Ltd', '54115F': 'Atamo Pty Ltd', '8CAE4C': 'Plugable Technologies', '0CC655': 'Wuxi YSTen Technology Co.,Ltd.', '242FFA': 'Toshiba Global Commerce Solutions', 'E496AE': 'ALTOGRAPHICS Inc.', '4C2258': 'cozybit, Inc.', 'F49466': 'CountMax, ltd', 'F45214': 'Mellanox Technologies, Inc.', '1C959F': 'Veethree Electronics And Marine LLC', '703811': 'Invensys Rail', '0881F4': 'Juniper Networks', '10F49A': 'T3 Innovation', '3C57BD': 'Kessler Crane Inc.', '04E9E5': 'PJRC.COM, LLC', '60BD91': 'Move Innovation', 'CC4BFB': 'Hellberg Safety AB', '6CADEF': 'KZ Broadband Technologies, Ltd. ', '745FAE': 'TSL PPL', '6851B7': 'PowerCloud Systems, Inc.', '742D0A': 'Norfolk Elektronik AG', '70F1E5': 'Xetawave LLC', 'C0AA68': 'OSASI Technos Inc.', '88D7BC': 'DEP Company', '485A3F': 'WISOL', '60BC4C': 'EWM Hightec Welding GmbH', '1C11E1': 'Wartsila Finland Oy', '50465D': 'ASUSTek COMPUTER INC.', '74BFA1': 'HYUNTECK', 'CC262D': 'Verifi, LLC', '3C8AE5': 'Tensun Information Technology(Hangzhou) Co.,LTD', '2C5AA3': 'PROMATE ELECTRONIC CO.LTD', '34E0CF': 'zte corporation', '08B738': 'Lite-On Technogy Corp.', 'F8AA8A': 'Axview Technology (Shenzhen) Co.,Ltd', '7C0187': 'Curtis Instruments, Inc.', '54F666': 'Berthold Technologies GmbH and Co.KG', '34C803': 'Nokia Corporation', 'F05F5A': 'Getriebebau NORD GmbH and Co. KG', '801DAA': 'Avaya Inc', '7C092B': 'Bekey A/S', '842BBC': 'Modelleisenbahn GmbH', 'B4009C': 'CableWorld Ltd.', '289EDF': 'Danfoss Turbocor Compressors, Inc', '803FD6': 'bytes at work AG', '784405': 'FUJITU(HONG KONG) ELECTRONIC Co.,LTD.', '044A50': 'Ramaxel Technology (Shenzhen) limited company', '0CD9C1': 'Visteon Corporation', '38A5B6': 'SHENZHEN MEGMEET ELECTRICAL CO.,LTD', '68AB8A': 'RF IDeas', '24EE3A': 'Chengdu Yingji Electronic Hi-tech Co Ltd', '0CC66A': 'Nokia Corporation', '74273C': 'ChangYang Technology (Nanjing) Co., LTD', '087CBE': 'Quintic Corp.', 'E804F3': 'Throughtek Co., Ltd.', '0868EA': 'EITO ELECTRONICS CO., LTD.', 'F82285': 'Cypress Technology CO., LTD.', 'C4AD21': 'MEDIAEDGE Corporation', 'E85BF0': 'Imaging Diagnostics', 'A40BED': 'Carry Technology Co.,Ltd', '702393': 'fos4X GmbH', '64D814': 'Cisco Systems, Inc', 'F85F2A': 'Nokia Corporation', 'C438D3': 'TAGATEC CO.,LTD', '502ECE': 'Asahi Electronics Co.,Ltd', 'AC14D2': 'wi-daq, inc.', '9C4CAE': 'Mesa Labs', '20C1AF': 'i Wit Digital Co., Limited', '80AAA4': 'USAG', '30AEF6': 'Radio Mobile Access', '085B0E': 'Fortinet, Inc.', 'EC42F0': 'ADL Embedded Solutions, Inc.', 'E8CBA1': 'Nokia Corporation', '6CE4CE': 'Villiger Security Solutions AG', '649FF7': 'Kone OYj', 'CC912B': 'TE Connectivity Touch Solutions', 'C05E79': 'SHENZHEN HUAXUN ARK TECHNOLOGIES CO.,LTD', '58BFEA': 'Cisco Systems, Inc', 'C401B1': 'SeekTech INC', 'C0C946': 'MITSUYA LABORATORIES INC.', 'F4600D': 'Panoptic Technology, Inc', 'A82BD6': 'Shina System Co., Ltd', 'ACCF23': 'Hi-flying electronics technology Co.,Ltd', '609084': 'DSSD Inc', 'FC1D59': 'I Smart Cities HK Ltd', '78C4AB': 'Shenzhen Runsil Technology Co.,Ltd', 'B0A86E': 'Juniper Networks', '802AFA': 'Germaneers GmbH', '18421D': 'Private', '28C914': 'Taimag Corporation', '7493A4': 'Zebra Technologies Corp.', 'E47185': 'Securifi Ltd', '080CC9': 'Mission Technology Group, dba Magma', '640E94': 'Pluribus Networks, Inc.', '0CB4EF': 'Digience Co.,Ltd.', '146A0B': 'Cypress Electronics Limited', 'F490EA': 'Deciso B.V.', '5CEE79': 'Global Digitech Co LTD', '4CAA16': 'AzureWave Technologies (Shanghai) Inc.',
'AC40EA': 'C&T Solution Inc. ', '002AAF': 'LARsys-Automation GmbH', '1CE165': 'Marshal Corporation', '4016FA': 'EKM Metering', '0C130B': 'Uniqoteq Ltd.', '2C542D': 'Cisco Systems, Inc', 'BC1401': 'Hitron Technologies. Inc', '94CA0F': 'Honeywell Analytics', '782544': 'Omnima Limited', 'A41875': 'Cisco Systems, Inc', 'C8AE9C': 'Shanghai TYD Elecronic Technology Co. Ltd', 'AC3FA4': 'TAIYO YUDEN CO.,LTD', '6CAE8B': 'IBM Corporation', '40AC8D': 'Data Management, Inc.', '80CEB1': 'Theissen Training Systems GmbH', 'FC2A54': 'Connected Data, Inc.', '045C06': 'Zmodo Technology Corporation', '747B7A': 'ETH Inc.', '48EA63': 'Zhejiang Uniview Technologies Co., Ltd.', 'E88DF5': 'ZNYX Networks, Inc.', '90F72F': 'Phillips Machine & Welding Co., Inc. ', 'D05785': 'Pantech Co., Ltd.', '408B07': 'Actiontec Electronics, Inc', '284121': 'OptiSense Network, LLC', '38458C': 'MyCloud Technology corporation', '10E4AF': 'APR, LLC', 'F4EA67': 'Cisco Systems, Inc', '2C2D48': 'bct electronic GesmbH', '28BA18': 'NextNav, LLC', 'AC3D75': 'HANGZHOU ZHIWAY TECHNOLOGIES CO.,LTD.', 'A090DE': 'VEEDIMS,LLC', '642DB7': 'SEUNGIL ELECTRONICS', '002A6A': 'Cisco Systems, Inc', 'F436E1': 'Abilis Systems SARL', '781C5A': 'SHARP Corporation', 'E80C75': 'Syncbak, Inc.', '800A06': 'COMTEC co.,ltd', '608C2B': 'Hanson Technology', '940070': 'Nokia Corporation', 'BC2C55': 'Bear Flag Design, Inc.', '0C7523': 'BEIJING GEHUA CATV NETWORK CO.,LTD', '04F021': 'Compex Systems Pte Ltd', '2818FD': 'Aditya Infotech Ltd.', 'D8B90E': 'Triple Domain Vision Co.,Ltd.', '342F6E': 'Anywire corporation', 'CCEED9': 'VAHLE Automation GmbH', '005CB1': 'Gospell DIGITAL TECHNOLOGY CO., LTD', 'B08E1A': 'URadio Systems Co., Ltd', 'D8E952': 'KEOPSYS', 'BCA4E1': 'Nabto', '908FCF': 'UNO System Co., Ltd', '40E793': 'Shenzhen Siviton Technology Co.,Ltd', '000831': 'Cisco Systems, Inc', '34D09B': 'MobilMAX Technology Inc.', 'F0007F': 'Janz - Contadores de Energia, SA', '30B3A2': 'Shenzhen Heguang Measurement & Control Technology Co.,Ltd', '506028': 'Xirrus Inc.', '0091FA': 'Synapse Product Development', 'A05AA4': 'Grand Products Nevada, Inc.', 'F0EEBB': 'VIPAR GmbH', '6CE907': 'Nokia Corporation', 'E4FA1D': 'PAD Peripheral Advanced Design Inc.', '1C5C55': 'PRIMA Cinema, Inc', '34BA9A': 'Asiatelco Technologies Co.', '506441': 'Greenlee', '9C1FDD': 'Accupix Inc.', '7CDD11': 'Chongqing MAS SCI&TECH.Co.,Ltd', 'B8FD32': 'Zhejiang ROICX Microelectronics', '70EE50': 'Netatmo', '984A47': 'CHG Hospital Beds', '144978': 'Digital Control Incorporated', '2C10C1': 'Nintendo Co., Ltd.', '8CD17B': 'CG Mobile', '502267': 'PixeLINK', '3C6A7D': 'Niigata Power Systems Co., Ltd.', '3C7059': 'MakerBot Industries', '502690': 'FUJITSU LIMITED', '24B657': 'Cisco Systems, Inc', 'C8AF40': 'marco Systemanalyse und Entwicklung GmbH', '40984C': 'Casacom Solutions AG', '5C18B5': 'Talon Communications', '64E161': 'DEP Corp.', '8823FE': 'TTTech Computertechnik AG', 'B89AED': 'OceanServer Technology, Inc', 'C87D77': 'Shenzhen Kingtech Communication Equipment Co.,Ltd', '94AE61': 'Alcatel Lucent', '5CCEAD': 'CDYNE Corporation', 'AC54EC': 'IEEE P1823 Standards Working Group', '709756': 'Happyelectronics Co.,Ltd', 'B820E7': 'Guangzhou Horizontal Information & Network Integration Co. Ltd',
'00CD90': 'MAS Elektronik AG', '7C6B52': 'Tigaro Wireless', '0064A6': 'Maquet CardioVascular', '988BAD': 'Corintech Ltd.', 'D44B5E': 'TAIYO YUDEN CO., LTD.', '640E36': 'TAZTAG', '941D1C': 'TLab West Systems AB', 'E455EA': 'Dedicated Computing', 'B05CE5': 'Nokia Corporation', '3482DE': 'Kiio Inc', '4C5FD2': 'Alcatel-Lucent', '28C718': 'Altierre', '7C4C58': 'Scale Computing, Inc.', '1013EE': 'Justec International Technology INC.', '8C271D': 'QuantHouse', '386077': 'PEGATRON CORPORATION', '708105': 'Cisco Systems, Inc', 'E0ED1A': 'vastriver Technology Co., Ltd', 'D4F63F': 'IEA S.R.L.', '58B0D4': 'ZuniData Systems Inc.', '64557F': 'NSFOCUS Information Technology Co., Ltd.', '00082F': 'Cisco Systems, Inc', '9CC7D1': 'SHARP Corporation', '149090': 'KongTop industrial(shen zhen)CO.,LTD', '38DE60': 'Mohlenhoff GmbH', '2839E7': 'Preceno Technology Pte.Ltd.', '685E6B': 'PowerRay Co., Ltd.', '20C8B3': 'SHENZHEN BUL-TECH CO.,LTD.', 'F8E7B5': 'µTech Tecnologia LTDA', 'D4CEB8': 'Enatel LTD', '807A7F': 'ABB Genway Xiamen Electrical Equipment CO., LTD', '24DAB6': 'Sistemas de Gestión Energética S.A. de C.V', 'B07D62': 'Dipl.-Ing. H. Horstmann GmbH', 'B8F5E7': 'WayTools, LLC', 'B81999': 'Nesys', '34255D': 'Shenzhen Loadcom Technology Co.,Ltd', '4CA74B': 'Alcatel Lucent', 'D03110': 'Ingenic Semiconductor Co.,Ltd', '1CE192': 'Qisda Corporation', '706F81': 'Private', 'FC0012': 'Toshiba Samsung Storage Technolgoy Korea Corporation ', '181420': 'TEB SAS', 'AC81F3': 'Nokia Corporation', '30688C': 'Reach Technology Inc.', '10EED9': 'Canoga Perkins Corporation', '94DE0E': 'SmartOptics AS', 'C029F3': 'XySystem', 'AC4AFE': 'Hisense Broadband Multimedia Technology Co.,Ltd.', '54F5B6': 'ORIENTAL PACIFIC INTERNATIONAL LIMITED', '90342B': 'Gatekeeper Systems, Inc.', '8CB82C': 'IPitomy Communications', '807DE3': 'Chongqing Sichuan Instrument Microcircuit Co.LTD.', 'DC175A': 'Hitachi High-Technologies Corporation', 'C8A1BA': 'Neul Ltd', 'C43A9F': 'Siconix Inc.', '686E23': 'Wi3 Inc.', 'DCF05D': 'Letta Teknoloji', '5C16C7': 'Big Switch Networks', '848F69': 'Dell Inc.', '3C096D': 'Powerhouse Dynamics', '900D66': 'Digimore Electronics Co., Ltd', '0C924E': 'Rice Lake Weighing Systems', 'F49461': 'NexGen Storage', 'B8CDA7': 'Maxeler Technologies Ltd.', '5435DF': 'Symeo GmbH', 'F43D80': 'FAG Industrial Services GmbH', 'F0DB30': 'Yottabyte', '9C31B6': 'Kulite Semiconductor Products Inc', 'A4B36A': 'JSC SDO Chromatec', 'E4DD79': 'En-Vision America, Inc.', 'E8CC32': 'Micronet LTD', 'D43AE9': 'DONGGUAN ipt INDUSTRIAL CO., LTD', '589835': 'Technicolor', '8C5CA1': 'd-broad,INC', '18F650': 'Multimedia Pacific Limited', '688470': 'eSSys Co.,Ltd', '48DCFB': 'Nokia Corporation', '20B7C0': 'OMICRON electronics GmbH', '8058C5': 'NovaTec Kommunikationstechnik GmbH', 'B8C716': 'Fiberhome Telecommunication Technologies Co.,LTD', 'D42C3D': 'Sky Light Digital Limited', 'A45A1C': 'smart-electronic GmbH', '806459': 'Nimbus Inc.', '8C89A5': "Micro-Star INT'L CO., LTD", 'B4A5A9': 'MODI GmbH', 'E8C320': 'Austco Communication Systems Pty Ltd', 'C436DA': 'Rusteletech Ltd.', '0432F4': 'Partron', '1C184A': 'ShenZhen RicherLink Technologies Co.,LTD', '0C3956': 'Observator instruments', 'DCA6BD': 'Beijing Lanbo Technology Co., Ltd.', '10C586': 'BIO SOUND LAB CO., LTD.', '10768A': 'EoCell', 'F44EFD': 'Actions Semiconductor Co.,Ltd.(Cayman Islands)', '24B8D2': 'Opzoon Technology Co.,Ltd.', 'A49981': 'FuJian Elite Power Tech CO.,LTD.', 'B83A7B': 'Worldplay (Canada) Inc.', '1407E0': 'Abrantix AG', 'DCCF94': 'Beijing Rongcheng Hutong Technology Co., Ltd.',
'A4DB2E': 'Kingspan Environmental Ltd', '605464': 'Eyedro Green Solutions Inc.', 'C8FE30': 'Bejing DAYO Mobile Communication Technology Ltd.', 'E4D71D': 'Oraya Therapeutics', '24C9DE': 'Genoray', '54055F': 'Alcatel Lucent', '6C5D63': 'ShenZhen Rapoo Technology Co., Ltd.', '941673': 'Point Core SARL', '5C56ED': '3pleplay Electronics Private Limited', '78028F': 'Adaptive Spectrum and Signal Alignment (ASSIA), Inc.', 'DC16A2': 'Medtronic Diabetes', '308CFB': 'Dropcam', 'D0EB9E': 'Seowoo Inc.', 'BCCD45': 'VOISMART', '143E60': 'Nokia', '7032D5': 'Athena Wireless Communications Inc', '78510C': 'LiveU Ltd.', '44AAE8': 'Nanotec Electronic GmbH & Co. KG', 'D428B2': 'ioBridge, Inc.', '8427CE': 'Corporation of the Presiding Bishop of The Church of Jesus Christ of Latter-day Saints', '48D8FE': 'ClarIDy Solutions, Inc.', 'D4945A': 'COSMO CO., LTD', '304C7E': 'Panasonic Electric Works Automation Controls Techno Co.,Ltd.', '5CF207': 'Speco Technologies', 'B42A39': 'ORBIT MERRET, spol. s r. o.', '70E843': 'Beijing C&W Optical Communication Technology Co.,Ltd.', '2C7ECF': 'Onzo Ltd', '103711': 'Simlink AS', '50E549': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', 'B4B88D': 'Thuh Company', '4C73A5': 'KOVE', '70A41C': 'Advanced Wireless Dynamics S.L.', 'BCBBC9': 'Kellendonk Elektronik GmbH', 'E42AD3': 'Magneti Marelli S.p.A. Powertrain', 'E83EB6': 'RIM', 'BC35E5': 'Hydro Systems Company', '9C5D95': 'VTC Electronics Corp.', 'B8A8AF': 'Logic S.p.A.', '60F673': 'TERUMO CORPORATION', '28CCFF': 'Corporacion Empresarial Altra SL', '94FD1D': 'WhereWhen Corp', '4C07C9': 'COMPUTER OFFICE Co.,Ltd.', 'F8769B': 'Neopis Co., Ltd.', '74B00C': 'Network Video Technologies, Inc', 'E84040': 'Cisco Systems, Inc', 'D89DB9': 'eMegatech International Corp.', '405A9B': 'ANOVO', 'E06995': 'PEGATRON CORPORATION', '84DE3D': 'Crystal Vision Ltd', 'D075BE': 'Reno A&E', 'BC6E76': 'Green Energy Options Ltd', 'E828D5': 'Cots Technology', 'F8DAF4': 'Taishan Online Technology Co., Ltd.', '08D5C0': 'Seers Technology Co., Ltd', '6C33A9': 'Magicjack LP', '108CCF': 'Cisco Systems, Inc', 'D8E3AE': 'CIRTEC MEDICAL SYSTEMS', '08B7EC': 'Wireless Seismic', '18AF9F': 'DIGITRONIC Automationsanlagen GmbH', '00B342': 'MacroSAN Technologies Co., Ltd.', '1CF5E7': 'Turtle Industry Co., Ltd.', '980EE4': 'Private', '447DA5': 'VTION INFORMATION TECHNOLOGY (FUJIAN) CO.,LTD', '0CCDD3': 'EASTRIVER TECHNOLOGY CO., LTD.', 'E46C21': 'messMa GmbH', '00B033': 'OAO Izhevskiy radiozavod', '081735': 'Cisco Systems, Inc', 'C89C1D': 'Cisco Systems, Inc', 'E437D7': 'HENRI DEPAEPE S.A.S.', 'E0A1D7': 'SFR', '9481A4': 'Azuray Technologies', 'BCE09D': 'Eoslink', '9C220E': 'TASCAN Systems GmbH', '7CDD90': 'Shenzhen Ogemray Technology Co., Ltd.', '0C3C65': 'Dome Imaging Inc', 'C8DF7C': 'Nokia Corporation', 'B44CC2': 'NR ELECTRIC CO., LTD', '48CB6E': 'Cello Electronics (UK) Ltd', 'BC4377': 'Hang Zhou Huite Technology Co.,ltd.', 'CC7669': 'SEETECH', 'AC20AA': 'DMATEK Co., Ltd.', 'FCAF6A': 'Qulsar Inc', '346F92': 'White Rodgers Division', '34BDF9': 'Shanghai WDK Industrial Co.,Ltd.', 'CCBE71': 'OptiLogix BV', '0C469D': 'MS Sedco', 'B09AE2': 'STEMMER IMAGING GmbH', '14EE9D': 'AirNav Systems LLC', '78D004': 'Neousys Technology Inc.', '8895B9': 'Unified Packet Systems Crop', 'D8FE8F': 'IDFone Co., Ltd.', '888C19': 'Brady Corp Asia Pacific Ltd', '448C52': 'KTIS CO., Ltd', '006DFB': 'Vutrix Technologies Ltd', '841888': 'Juniper Networks', '9067B5': 'Alcatel-Lucent', 'E0F379': 'Vaddio', '78B6C1': 'AOBO Telecom Co.,Ltd', '08D29A': 'Proformatique', 'C89383': 'Embedded Automation, Inc.', '78A051': 'iiNet Labs Pty Ltd ',
'804F58': 'ThinkEco, Inc.', '0475F5': 'CSST', '8C4DEA': 'Cerio Corporation', '24BA30': 'Technical Consumer Products, Inc.', '188ED5': 'TP Vision Belgium N.V. - innovation site Brugge', 'E80C38': 'DAEYOUNG INFORMATION SYSTEM CO., LTD', 'E08A7E': 'Exponent', 'A8B0AE': 'LEONI ', 'E42771': 'Smartlabs', '34DF2A': 'Fujikon Industrial Co.,Limited', '2CDD0C': 'Discovergy GmbH', '40B2C8': 'Nortel Networks', '70A191': 'Trendsetter Medical, LLC', '708B78': 'citygrow technology co., ltd', '64317E': 'Dexin Corporation', '3C99F7': 'Lansentechnology AB', '507D02': 'BIODIT', 'B4A4E3': 'Cisco Systems, Inc', '8C1F94': 'RF Surgical System Inc. ', '4491DB': 'Shanghai Huaqin Telecom Technology Co.,Ltd', '14D76E': 'CONCH ELECTRONIC Co.,Ltd', 'AC83F0': 'ImmediaTV Corporation', 'CC6B98': 'Minetec Wireless Technologies', '3C04BF': 'PRAVIS SYSTEMS Co.Ltd.,', '94DD3F': 'A+V Link Technologies, Corp.', 'F44227': 'S & S Research Inc.', 'C8A729': 'SYStronics Co., Ltd.', '4454C0': 'Thompson Aerospace', 'C4F464': 'Spica international', '602A54': 'CardioTek B.V.', 'BCFFAC': 'TOPCON CORPORATION', '445EF3': 'Tonalite Holding B.V.', '68DB96': 'OPWILL Technologies CO .,LTD', '7C55E7': 'YSI, Inc.', '70B08C': 'Shenou Communication Equipment Co.,Ltd', 'C03B8F': 'Minicom Digital Signage', '20FEDB': 'M2M Solution S.A.S.', '0C8D98': 'TOP EIGHT IND CORP', '40C7C9': 'Naviit Inc.', '7CBB6F': 'Cosco Electronics Co., Ltd.', '94A7BC': 'BodyMedia, Inc.', 'C8A1B6': 'Shenzhen Longway Technologies Co., Ltd', 'A8556A': 'Pocketnet Technology Inc.', 'B4C810': 'UMPI Elettronica', '64A232': 'OOO Samlight', '64FC8C': 'Zonar Systems', 'D0574C': 'Cisco Systems, Inc', 'F8DAE2': 'NDC Technologies', '705EAA': 'Action Target, Inc.', '34F968': 'ATEK Products, LLC', '20B0F7': 'Enclustra GmbH', '543131': 'Raster Vision Ltd', 'D0E347': 'Yoga', 'F0ED1E': 'Bilkon Bilgisayar Kontrollu Cih. Im.Ltd.', 'C416FA': 'Prysm Inc', '506F9A': 'Wi-Fi Alliance', '842914': 'EMPORIA TELECOM Produktions- und VertriebsgesmbH & Co KG', 'BC7DD1': 'Radio Data Comms', '585076': 'Linear Equipamentos Eletronicos SA', 'F0F9F7': 'IES GmbH & Co. KG', '38580C': 'Panaccess Systems GmbH', '4451DB': 'Raytheon BBN Technologies', 'DCFAD5': 'STRONG Ges.m.b.H.', '6C8D65': 'Wireless Glue Networks, Inc.', '9803A0': 'ABB n.v. Power Quality Products',
'CC43E3': 'Trump s.a.', 'F8C091': 'Highgates Technology', 'AC9B84': 'Smak Tecnologia e Automacao', '90F278': 'Radius Gateway', '806629': 'Prescope Technologies CO.,LTD.', '241F2C': 'Calsys, Inc.', 'F0BDF1': 'Sipod Inc.', '646707': 'Beijing Omnific Technology, Ltd.', '58FD20': 'Bravida Sakerhet AB', 'ACA016': 'Cisco Systems, Inc', '58E747': 'Deltanet AG', '404022': 'ZIV', 'A85BB0': 'Shenzhen Dehoo Technology Co.,Ltd', '44A689': 'PROMAX ELECTRONICA SA', '40618E': 'Stella-Green Co', '68E41F': 'Unglaube Identech GmbH', '4C60D5': 'airPointe of New Hampshire', '888717': 'CANON INC.', '6CDC6A': 'Promethean Limited', '9055AE': 'Ericsson, EAB/RWI/K', '1010B6': 'McCain Inc', '009363': 'Uni-Link Technology Co., Ltd.', 'D4823E': 'Argosy Technologies, Ltd.', '003532': 'Electro-Metrics Corporation', '081FF3': 'Cisco Systems, Inc', '44376F': 'Young Electric Sign Co', '389F83': 'OTN Systems N.V.', 'BC6A16': 'tdvine', '003A9D': 'NEC Platforms, Ltd.', '28CD4C': 'Individual Computers GmbH', '8C53F7': 'A&D ENGINEERING CO., LTD.', '7C7673': 'ENMAS GmbH', '6C6F18': 'Stereotaxis, Inc.', '84C727': 'Gnodal Ltd', '087695': 'Auto Industrial Co., Ltd.', 'ACCE8F': 'HWA YAO TECHNOLOGIES CO., LTD', '8C9236': 'Aus.Linx Technology Co., Ltd.', '10C73F': 'Midas Klark Teknik Ltd', 'F8912A': 'GLP German Light Products GmbH', '44E49A': 'OMNITRONICS PTY LTD', '08F2F4': 'Net One Partners Co.,Ltd.', '0C7D7C': 'Kexiang Information Technology Co, Ltd.', '3037A6': 'Cisco Systems, Inc', 'DC1D9F': 'U & B tech', 'D8E72B': 'NetAlly', '785C72': 'Hioso Technology Co., Ltd.', '580556': 'Elettronica GF S.r.L.', 'B09074': 'Fulan Electronics Limited', '94F692': 'Geminico co.,Ltd.', '68EFBD': 'Cisco Systems, Inc', 'F02408': 'Talaris (Sweden) AB', '8081A5': 'TONGQING COMMUNICATION EQUIPMENT (SHENZHEN) Co.,Ltd', 'B482FE': 'ASKEY COMPUTER CORP', '307C30': 'RIM', 'BC4E3C': 'CORE STAFF CO., LTD.', '502A8B': 'Telekom Research and Development Sdn Bhd', 'EC43E6': 'AWCER Ltd.', '7812B8': 'ORANTEK LIMITED', '98BC99': 'Edeltech Co.,Ltd.', 'F02FD8': 'Bi2-Vision', '544249': 'Sony Corporation', '904716': 'RORZE CORPORATION', '10445A': 'Shaanxi Hitech Electronic Co., LTD', 'F47626': 'Viltechmeda UAB ', '0C17F1': 'TELECSYS', '003A9B': 'Cisco Systems, Inc', '2C3427': 'ERCO & GENER', '80912A': 'Lih Rong electronic Enterprise Co., Ltd.', '7C2F80': 'Gigaset Communications GmbH', '10B7F6': 'Plastoform Industries Ltd.', '448E81': 'VIG', '8894F9': 'Gemicom Technology, Inc.', '502A7E': 'Smart electronic GmbH', '5C8778': 'Cybertelbridge co.,ltd', '38BB23': 'OzVision America LLC', '0C8411': 'A.O. Smith Water Products', 'E0ABFE': 'Orb Networks, Inc.', '448312': 'Star-Net', 'A05DE7': 'DIRECTV, Inc.', '087618': 'ViE Technologies Sdn. Bhd.', 'D0E40B': 'Wearable Inc.', '747E1A': 'Red Embedded Design Limited', '14A86B': 'ShenZhen Telacom Science&Technology Co., Ltd', '0CC3A7': 'Meritec', 'DCE2AC': 'Lumens Digital Optics Inc.', '98D88C': 'Nortel Networks', '78192E': 'NASCENT Technology', '48EB30': 'ETERNA TECHNOLOGY, INC.', '4C322D': 'TELEDATA NETWORKS', 'AC867E': 'Create New Technology (HK) Limited Company', '8C598B': 'C Technologies AB', 'D44CA7': 'Informtekhnika & Communication, LLC', 'A8C222': 'TM-Research Inc.', '003D41': 'Hatteland Computer AS', 'CC5076': 'Ocom Communications, Inc.', '4CC452': 'Shang Hai Tyd. Electon Technology Ltd.',
'7CCB0D': 'Antaira Technologies, LLC', 'C01E9B': 'Pixavi AS', '803B9A': 'ghe-ces electronic ag', '743256': 'NT-ware Systemprg GmbH', 'C4E17C': 'U2S co.', '20BFDB': 'DVL', '20415A': 'Smarteh d.o.o.', 'A4DA3F': 'Bionics Corp.', 'A04025': 'Actioncable, Inc.', '4C4B68': 'Mobile Device, Inc. ', '201257': 'Most Lucky Trading Ltd', 'E8DAAA': 'VideoHome Technology Corp.', '647D81': 'YOKOTA INDUSTRIAL CO,.LTD', '7CCFCF': 'Shanghai SEARI Intelligent System Co., Ltd', '68AAD2': 'DATECS LTD.,', 'A4DE50': 'Total Walther GmbH', '1CF061': 'SCAPS GmbH', 'A893E6': 'JIANGXI JINGGANGSHAN CKING COMMUNICATION TECHNOLOGY CO.,LTD', 'C4AAA1': 'SUMMIT DEVELOPMENT, spol.s r.o.', '3032D4': 'Hanilstm Co., Ltd.', 'E064BB': 'DigiView S.r.l.', 'DC3350': 'TechSAT GmbH', 'F0BCC8': 'MaxID (Pty) Ltd', '24828A': 'Prowave Technologies Ltd.', '68CC9C': 'Mine Site Technologies', '146E0A': 'Private', '0CE709': 'Fox Crypto B.V.', 'B4B5AF': 'Minsung Electronics', '04B3B6': 'Seamap (UK) Ltd', '00270B': 'Adura Technologies', '00270D': 'Cisco Systems, Inc', '00271B': 'Alec Sicherheitssysteme GmbH', '002718': 'Suzhou NEW SEAUNION Video Technology Co.,Ltd', '6C0F6A': 'JDC Tech Co., Ltd.', '04B466': 'BSP Co., Ltd.', 'D8D67E': 'GSK CNC EQUIPMENT CO.,LTD', '0026AE': 'Wireless Measurement Ltd', '0026B1': 'Navis Auto Motive Systems, Inc.', '0026AA': 'Kenmec Mechanical Engineering Co., Ltd.', '0026D2': 'Pcube Systems, Inc.', '0026CD': 'PurpleComm, Inc.', '002707': 'Lift Complex DS, JSC', '0026D7': 'KM Electornic Technology Co., Ltd.', '0026D0': 'Semihalf', '0026FE': 'MKD Technology Inc.', '0026A0': 'moblic', '0026E5': 'AEG Power Solutions', '0026E3': 'DTI', '0026BC': 'General Jack Technology Ltd.', '002696': 'NOOLIX Co., Ltd', '00269A': 'Carina System Co., Ltd.', '002695': "ZT Group Int'l Inc", '002693': 'QVidium Technologies, Inc.', '002665': 'ProtectedLogic Corporation', '002660': 'Logiways', '002670': 'Cinch Connectors', '002671': 'AUTOVISION Co., Ltd', '002648': 'Emitech Corp.', '002645': 'Circontrol S.A.', '00263E': 'Trapeze Networks', '00263C': 'Bachmann Technology GmbH & Co. KG',
'00263D': 'MIA Corporation', '002678': 'Logic Instrument SA', '002677': 'DEIF A/S', '00268E': 'Alta Solutions, Inc.', '002688': 'Juniper Networks', '0025DA': 'Secura Key', '0025DB': 'ATI Electronics(Shenzhen) Co., LTD', '0025D5': 'Robonica (Pty) Ltd', '0025E2': 'Everspring Industry Co., Ltd.', '0025E1': 'SHANGHAI SEEYOO ELECTRONIC & TECHNOLOGY CO., LTD', '00260E': 'Ablaze Systems, LLC', '002610': 'Apacewave Technologies', '00260D': 'Mercury Systems, Inc.', '00260A': 'Cisco Systems, Inc', '002632': 'Instrumentation Technologies d.d.', '00262E': 'Chengdu Jiuzhou Electronic Technology Inc', '00262C': 'IKT Advanced Technologies s.r.o.', '002629': 'Juphoon System Software Inc.', '002625': 'MediaSputnik', '002626': 'Geophysical Survey Systems, Inc.', '0025CC': 'Mobile Communications Korea Incorporated', '0025F9': 'GMK electronic design GmbH', '0025F7': 'Ansaldo STS USA', '00261B': 'LAUREL BANK MACHINES CO., LTD.', '002614': 'KTNF', '002603': 'Shenzhen Wistar Technology Co., Ltd', '0025A6': 'Central Network Solution Co., Ltd.', '0025AA': 'Beijing Soul Technology Co.,Ltd.', '002588': 'Genie Industries, Inc.', '002580': 'Equipson S.A.', '0025BD': "Italdata Ingegneria dell'Idea S.p.A.", '0025B7': 'Costar electronics, inc.,', '00257D': 'PointRed Telecom Private Ltd.', '0025A2': 'Alta Definicion LINCEO S.L.', '00256D': 'Broadband Forum', '00256C': 'Azimut Production Association JSC', '002563': 'Luxtera Inc', '002593': 'DatNet Informatikai Kft.', '00258E': 'The Weather Channel', '0025A3': 'Trimax Wireless, Inc.', '00259C': 'Cisco-Linksys, LLC', '0025C8': 'S-Access GmbH', '0025C0': 'ZillionTV Corporation', '00251B': 'Philips CareServant', '002518': 'Power PLUS Communications AG', '002515': 'SFR', '00250D': 'GZT Telkom-Telmor sp. z o.o.', '00250E': 'gt german telematics gmbh', '002531': 'Cloud Engines, Inc.', '00252D': 'Kiryung Electronics', '002545': 'Cisco Systems, Inc', '002542': 'Pittasoft', '002536': 'Oki Electric Industry Co., Ltd.', '002541': 'Maquet Critical Care AB', '00252B': 'Stirling Energy Systems', '002524': 'Lightcomm Technology Co., Ltd', '00254E': 'Vertex Wireless Co., Ltd.', '002546': 'Cisco Systems, Inc', '002522': 'ASRock Incorporation', '002560': 'Ibridge Networks & Communications Ltd.', '0024B8': 'free alliance sdn bhd', '0024B3': 'Graf-Syteco GmbH & Co. KG', '0024F6': 'MIYOSHI ELECTRONICS CORPORATION', '0024F0': 'Seanodes', '0024CB': 'Autonet Mobile', '0024D1': 'Thomson Inc.', '0024C9': 'Broadband Solutions Group', '0024CA': 'Tobii Technology AB', '002508': 'Maquet Cardiopulmonary AG', '0024FC': 'QuoPin Co., Ltd.', '0024FB': 'Private', '0024FA': 'Hilger u.
Kern GMBH', '0024D0': 'Shenzhen SOGOOD Industry CO.,LTD.', '0024CC': 'Fascinations Toys and Gifts, Inc.', '0024C7': 'Mobilarm Ltd', '0024DF': 'Digitalbox Europe GmbH', '002445': 'Adtran Inc', '00243F': 'Storwize, Inc.', '002497': 'Cisco Systems, Inc', '0024A3': 'Sonim Technologies Inc', '0024AA': 'Dycor Technologies Ltd.', '0024A9': 'Ag Leader Technology', '0024A6': 'TELESTAR DIGITAL GmbH', '00249B': 'Action Star Enterprise Co., Ltd.', '002471': 'Fusion MultiSystems dba Fusion-io', '002474': 'Autronica Fire And Securirty', '002446': 'MMB Research Inc.', '002463': 'Phybridge Inc', '002472': 'ReDriven Power Inc.', '00241C': 'FuGang Electronic (DG) Co.,Ltd', '002419': 'Private', '002415': 'Magnetic Autocontrol GmbH', '0023E7': 'Hinke A/S', '0023E6': 'Pirkus, Inc.', '0023E2': 'SEA Signalisation', '002425': 'Shenzhenshi chuangzhicheng Technology Co.,Ltd', '002427': 'SSI COMPUTER CORP', '002411': 'PharmaSmart LLC', '00240F': 'Ishii Tool & Engineering Corporation', '0023FA': 'RG Nets, Inc.', '0023F2': 'TVLogic', '00240A': 'US Beverage Net', '002407': 'TELEM SAS', '002440': 'Halo Monitoring, Inc.', '0023CF': 'CUMMINS-ALLISON CORP.', '0023C2': 'SAMSUNG Electronics. Co. LTD', '0023C4': 'Lux Lumen', '0023C5': 'Radiation Safety and Control Services Inc', '0023C6': 'SMC Corporation', '002388': 'V.T. Telematica S.p.a.', '002386': 'Tour & Andersson AB', '002383': 'InMage Systems Inc', '002381': 'Lengda Technology(Xiamen) Co.,Ltd.', '0023BF': 'Mainpine, Inc.', '0023B2': 'Intelligent Mechatronic Systems Inc', '0023B5': 'ORTANA LTD', '0023B9': ' Airbus Defence and Space Deutschland GmbH', '0023BD': 'Digital Ally, Inc.', '0023D5': 'WAREMA electronic GmbH', '0023C9': 'Sichuan Tianyi Information Science & Technology Stock CO.,LTD', '0023CE': 'KITA DENSHI CORPORATION', '0023A9': 'Beijing Detianquan Electromechanical Equipment Co., Ltd', '0023A7': 'Redpine Signals, Inc.', '00239B': 'Elster Solutions, LLC', '00237B': 'WHDI LLC', '002324': 'G-PRO COMPUTER', '0023E0': 'INO Therapeutics LLC', '002390': 'Algolware Corporation', '002311': 'Gloscom Co., Ltd.', '002309': 'Janam Technologies LLC', '002304': 'Cisco Systems, Inc', '00235D': 'Cisco Systems, Inc', '00235C': 'Aprius, Inc.', '002352': 'DATASENSOR S.p.A.', '00232F': 'Advanced Card Systems Ltd.', '002353': 'F E T Elettronica snc', '002342': 'Coffee Equipment Company', '002337': 'Global Star Solutions ULC', '002319': 'Sielox LLC', '00236A': 'SmartRG Inc', '002331': 'Nintendo Co., Ltd.', '002335': 'Linkflex Co.,Ltd', '002325': 'IOLAN Holding', '002321': 'Avitech International Corp', '002370': 'Snell', '0022B9': 'Analogix Seminconductor, Inc', '0022B8': 'Norcott', '0022B7': 'GSS Grundig SAT-Systems GmbH', '0022B3': 'Sei S.p.A.', '00229C': 'Verismo Networks Inc', '00229A': 'Lastar, Inc.', '0022EE': 'Algo Communication Products Ltd', '0022EA': 'Rustelcom Inc.', '0022F0': '3 Greens Aviation Limited', '0022EC': 'IDEALBT TECHNOLOGY CORPORATION', '0022DD': 'Protecta Electronics Ltd', '0022AB': 'Shenzhen Turbosight Technology Ltd', '00229B': 'AverLogic Technologies, Inc.', '0022BE': 'Cisco Systems, Inc', '0022BF': 'SieAmp Group of Companies', '0022DB': 'Translogic Corporation', '0022DA': 'ANATEK, LLC', '0022F9': 'Pollin Electronic GmbH', '0022C5': 'INFORSON Co,Ltd.', '002262': 'BEP Marine', '00226A': 'Honeywell', '002263': 'Koos Technical Services, Inc.', '00226C': 'LinkSprite Technologies, Inc.', '00225C': 'Multimedia & Communication Technology', '002284': 'DESAY A&V SCIENCE AND TECHNOLOGY CO.,LTD', '002286': 'ASTRON', '002282': '8086 Consultancy', '002246': 'Evoc 
Intelligent Technology Co.,Ltd.', '002248': 'Microsoft Corporation', '002290': 'Cisco Systems, Inc', '00228A': 'Teratronik elektronische systeme gmbh', '00228E': 'TV-NUMERIC', '002254': 'Bigelow Aerospace', '002257': '3COM EUROPE LTD', '002276': 'Triple EYE B.V.', '002274': 'FamilyPhone AB', '002236': 'VECTOR SP. Z O.O.', '002230': 'FutureLogic Inc.', '00222E': 'maintech GmbH', '002214': 'RINNAI KOREA', '00220B': 'National Source Coding Center', '00220C': 'Cisco Systems, Inc', '0021EA': 'Bystronic Laser AG', '0021FD': 'LACROIX TRAFFIC S.A.U', '0021CD': 'LiveTV', '0021D0': 'Global Display Solutions Spa', '002228': 'Breeze Innovations Ltd.', '002229': 'Compumedics Ltd', '002216': 'SHIBAURA VENDING MACHINE CORPORATION', '0021E1': 'Nortel Networks', '002200': 'IBM Corp', '0021C6': 'CSJ Global, Inc.', '0021C3': 'CORNELL Communications, Inc.', '0021C7': 'Russound', '0021C1': 'ABB Oy / Medium Voltage Products', '0021C0': 'Mobile Appliance, Inc.', '0021BB': 'Riken Keiki Co., Ltd.', '002166': 'NovAtel Inc.', '002164': 'Special Design Bureau for Seismic Instrumentation', '002160': 'Hidea Solutions Co. Ltd.', '0021B1': 'DIGITAL SOLUTIONS LTD', '0021B0': 'Tyco Telecommunications', '0021AD': 'Nordic ID Oy', '00217F': 'Intraco Technology Pte Ltd', '00217D': 'PYXIS S.R.L.', '00216F': 'SymCom, Inc.', '0021A3': 'Micromint', '0021A5': 'ERLPhase Power Technologies Ltd.', '00219D': 'Adesys BV', '002195': 'GWD Media Limited', '002188': 'EMC Corporation', '00211A': 'LInTech Corporation', '002116': 'Transcon Electronic Systems, spol. s r. o.', '002115': 'PHYWE Systeme GmbH & Co. KG', '002141': 'RADLIVE', '002140': 'EN Technologies Inc.', '00213D': 'Cermetek Microelectronics, Inc.', '002111': 'Uniphone Inc.', '002114': 'Hylab Technology Inc.', '002132': 'Masterclock, Inc.', '002131': 'Blynke Inc.', '002129': 'Cisco-Linksys, LLC', '00211D': 'Dataline AB', '002120': 'Sequel Technologies', '002152': 'General Satellite Research & Development Limited', '002158': 'Style Flying Technology Co.', '002148': 'Kaco Solar Korea', '00213C': 'AliphCom', '001FB5': 'I/O Interconnect Inc.', '001FE7': 'Simet', '001FDB': 'Network Supply Corp.,', '001FBF': 'Fulhua Microelectronics Corp. Taiwan Branch', '001FBE': 'Shenzhen Mopnet Industrial Co.,Ltd', '001FC2': 'Jow Tong Technology Co Ltd', '001FD2': 'COMMTECH TECHNOLOGY MACAO COMMERCIAL OFFSHORE LTD.', '001FB8': 'Universal Remote Control, Inc.', '001FD4': '4IPNET, INC.', '001FCB': 'NIW Solutions', '001FF7': 'Nakajima All Precision Co., Ltd.', '001FEB': 'Trio Datacom Pty Ltd', '001F69': 'Pingood Technology Co., Ltd.', '001F4D': 'Segnetics LLC', '001F81': 'Accel Semiconductor Corp', '001F83': 'Teleplan Technology Services Sdn Bhd', '001F9B': 'POSBRO', '001F78': 'Blue Fox Porini Textile', '001F68': 'Martinsson Elektronik AB', '001F63': 'JSC Goodwin-Europa', '001FAD': 'Brown Innovations, Inc', '001FA6': 'Stilo srl', '001F97': 'BERTANA srl', '001F8C': 'CCS Inc.', '001F10': 'TOLEDO DO BRASIL INDUSTRIA DE BALANCAS LTDA', '001F0F': 'Select Engineered Systems', '001F02': 'Pixelmetrix Corporation Pte Ltd', '001EFE': 'LEVEL s.r.o.', '001F1A': 'Prominvest', '001F18': 'Hakusan.Mfg.Co,.Ltd', '001F13': 'S.& A.S. 
Ltd.', '001F25': 'MBS GmbH', '001F27': 'Cisco Systems, Inc', '001F26': 'Cisco Systems, Inc', '001EEF': 'Cantronic International Limited', '001EDE': 'BYD COMPANY LIMITED', '001EDD': 'WASKO S.A.', '001EDB': 'Giken Trastem Co., Ltd.', '001F42': 'Etherstack plc', '001F35': 'AIR802 LLC', '001F34': 'Lung Hwa Electronics Co., Ltd.', '001EEB': 'Talk-A-Phone Co.', '001E6A': 'Beijing Bluexon Technology Co.,Ltd', '001E66': 'RESOL Elektronische Regelungen GmbH', '001E63': 'Vibro-Meter SA', '001E7F': 'CBM of America', '001E82': 'SanDisk Corporation', '001EB1': 'Cryptsoft Pty Ltd', '001EAF': 'Ophir Optronics Ltd', '001EAD': 'Wingtech Group Limited', '001ED1': 'Keyprocessor B.V.', '001ED0': 'Ingespace', '001E8E': 'Hunkeler AG', '001E87': 'Realease Limited', '001ECD': 'KYLAND Technology Co. LTD', '001EBF': 'Haas Automation Inc.', '001EBC': 'WINTECH AUTOMATION CO.,LTD.', '001E6F': 'Magna-Power Electronics, Inc.', '001EA1': 'Brunata a/s', '001E53': 'Further Tech Co., LTD', '001E4E': 'DAKO EDV-Ingenieur- und Systemhaus GmbH', '001E49': 'Cisco Systems, Inc', '001E28': 'Lumexis Corporation', '001E24': 'Zhejiang Bell Technology Co.,ltd', '001E20': 'Intertain Inc.', '001E1C': 'SWS Australia Pty Limited', '001E12': 'Ecolab', '001E16': 'Keytronix', '001E32': 'Zensys', '001E35': 'Nintendo Co., Ltd.', '001E2B': 'Radio Systems Design, Inc.', '001E42': 'Teltonika', '001E43': 'AISIN AW CO.,LTD.', '001E08': 'Centec Networks Inc', '001DFB': 'NETCLEUS Systems Corporation', '001DB9': 'Wellspring Wireless', '001DBB': 'Dynamic System Electronics Corp.', '001DB3': 'HPN Supply Chain', '001DB1': 'Crescendo Networks', '001DB4': 'KUMHO ENG CO.,LTD', '001DA4': 'Hangzhou System Technology CO., LTD', '001D9F': 'MATTR.P.Traczynscy Sp.J.', '001D90': 'EMCO Flow Systems', '001D93': 'Modacom', '001D94': 'Climax Technology Co., Ltd', '001D8E': 'Alereon, Inc.', '001DDB': 'C-BEL Corporation', '001DE6': 'Cisco Systems, Inc', '001DE7': 'Marine Sonic Technology, Ltd.', '001D7B': 'Ice Energy, Inc.', '001D6C': 'ClariPhy Communications, Inc.', '001DC5': 'Beijing Jiaxun Feihong Electricial Co., Ltd.', '001DC6': 'SNR Inc.', '001D84': 'Gateway, Inc.', '001D85': 'Call Direct Cellular Solutions', '001DBF': 'Radiient Technologies, Inc.', '001D34': 'SYRIS Technology Corp', '001D32': 'Longkay Communication & Technology (Shanghai) Co. Ltd', '001D2A': 'SHENZHEN BUL-TECH CO.,LTD.', '001D2D': 'Pylone, Inc.', '001D5B': 'Tecvan Informática Ltda', '001D5D': 'Control Dynamics Pty. Ltd.', '001D59': 'Mitra Energy & Infrastructure', '001D2B': 'Wuhan Pont Technology CO. 
, LTD', '001D22': 'Foss Analytical A/S', '001D23': 'SENSUS ', '001D3E': 'SAKA TECHNO SCIENCE CO.,LTD', '001D40': ' Intel – GE Care Innovations LLC', '001D57': 'CAETEC Messtechnik', '001D51': 'Babcock & Wilcox Power Generation Group, Inc', '001D4C': 'Alcatel-Lucent', '001D1A': 'OvisLink S.A.', '001CB7': 'USC DigiArk Corporation', '001CAF': 'Plato Networks Inc.', '001CB1': 'Cisco Systems, Inc', '001CFE': 'Quartics Inc', '001D0B': 'Power Standards Lab', '001D02': 'Cybertech Telecom Development', '001CE9': 'Galaxy Technology Limited', '001CEA': 'Scientific-Atlanta, Inc', '001CE7': 'Rocon PLC Research Centre', '001CDB': 'CARPOINT CO.,LTD', '001CD5': 'ZeeVee, Inc.', '001CBC': 'CastGrabber, LLC', '001CE4': 'EleSy JSC', '001CE2': 'Attero Tech, LLC.', '001CAA': 'Bellon Pty Ltd', '001CA0': 'Production Resource Group, LLC', '001CD3': 'ZP Engineering SEL', '001CCB': 'Forth Corporation Public Company Limited', '001C75': 'Segnet Ltd.', '001C74': 'Syswan Technologies Inc.', '001C68': 'Anhui Sun Create Electronics Co., Ltd', '001C66': 'UCAMP CO.,LTD', '001C98': 'LUCKY TECHNOLOGY (HK) COMPANY LIMITED', '001C91': 'Gefen Inc.', '001C81': 'NextGen Venturi LTD', '001C7A': 'Perfectone Netware Company Ltd', '001C7B': 'Castlenet Technology Inc.', '001C53': 'Synergy Lighting Controls', '001C4D': 'Aplix IP Holdings Corporation', '001C92': 'Tervela', '001C8A': 'Cirrascale Corporation', '001C38': 'Bio-Rad Laboratories, Inc.', '001C30': 'Mode Lighting (UK ) Ltd.', '001C2E': 'HPN Supply Chain', '001C2A': 'Envisacor Technologies Inc.', '001C02': 'Pano Logic', '001C05': 'Nonin Medical Inc.', '001C06': 'Siemens Numerical Control Ltd., Nanjing', '001BEA': 'Nintendo Co., Ltd.', '001BE5': '802automation Limited', '001BE4': 'TOWNET SRL', '001C04': 'Airgain, Inc.', '001C01': 'ABB Oy Drives', '001BFF': 'Millennia Media inc.', '001BF2': 'KWORLD COMPUTER CO., LTD', '001BF0': 'Value Platforms Limited', '001C1B': 'Hyperstone GmbH', '001C10': 'Cisco-Linksys, LLC', '001BD2': 'ULTRA-X ASIA PACIFIC Inc.', '001B8D': 'Electronic Computer Systems, Inc.', '001B86': 'Bosch Access Systems GmbH', '001BC2': 'Integrated Control Technology Limitied', '001BBB': 'RFTech Co.,Ltd', '001BAA': 'XenICs nv', '001B7C': 'A & R Cambridge', '001B5D': 'Vololink Pty Ltd', '001B5A': 'Apollo Imaging Technologies, Inc.', '001B56': 'Tehuti Networks Ltd.', '001BC6': 'Strato Rechenzentrum AG', '001BC4': 'Ultratec, Inc.', '001BA1': 'Åmic AB', '001B96': 'General Sensing', '001AEA': 'Radio Terminal Systems Pty Ltd', '001ADD': 'PePWave Ltd', '001AD6': 'JIAGNSU AETNA ELECTRIC CO.,LTD', '001AD4': 'iPOX Technology Co., Ltd.', '001B14': 'Carex Lighting Equipment Factory', '001B15': 'Voxtel, Inc.', '001B09': 'Matrix Telecom Pvt. Ltd.', '001B03': 'Action Technology (SZ) Co., Ltd', '001AFB': 'Joby Inc.', '001AFD': 'EVOLIS', '001B1E': 'HART Communication Foundation', '001B4C': 'Signtech', '001AD5': 'KMC CHAIN INDUSTRIAL CO., LTD.', '001AD0': 'Albis Technologies AG', '001AD3': 'Vamp Ltd.', '001AD8': 'AlsterAero GmbH', '001ADA': 'Biz-2-Me Inc.', '001A6F': 'MI.TEL s.r.l.', '001A71': 'Diostech Co., Ltd.', '001A69': 'Wuhan Yangtze Optical Technology CO.,Ltd.', '001A67': 'Infinite QL Sdn Bhd', '001AC3': 'Scientific-Atlanta, Inc', '001ABF': 'TRUMPF Laser Marking Systems AG', '001AB8': 'Anseri Corporation', '001ABC': 'U4EA Technologies Ltd', '001ACB': 'Autocom Products Ltd', '001ACF': 'C.T. 
ELETTRONICA', '001AA3': 'DELORME', '001A9B': 'ADEC & Parter AG', '001A9D': 'Skipper Wireless, Inc.', '001A85': 'NV Michel Van de Wiele', '001A8E': '3Way Networks Ltd', '001A44': 'JWTrading Co., Ltd', '001A49': 'Micro Vision Co.,LTD', '001A3D': 'Ajin Vision Co.,Ltd', '001A41': 'INOCOVA Co.,Ltd', '001A33': 'ASI Communications, Inc.', '001A23': 'Ice Qube, Inc', '001A1D': 'PChome Online Inc.', '001A17': 'Teak Technologies, Inc.', '001A1C': 'GT&T Engineering Pte Ltd', '001A1F': 'Coastal Environmental Systems', '001A64': 'IBM Corp', '001A51': 'Alfred Mann Foundation', '001A55': 'ACA-Digital Corporation', '0019E6': 'TOYO MEDIC CO.,LTD.', '0019E2': 'Juniper Networks', '0019E8': 'Cisco Systems, Inc', '0019DF': 'Thomson Inc.', '0019DD': 'FEI-Zyfer, Inc.', '0019B2': 'XYnetsoft Co.,Ltd', '0019A4': 'Austar Technology (hang zhou) Co.,Ltd', '0019AA': 'Cisco Systems, Inc', '0019B1': 'Arrow7 Corporation', '0019B3': 'Stanford Research Systems', '001A0A': 'Adaptive Micro-Ware Inc.', '001A05': 'OPTIBASE LTD', '0019D4': 'ICX Technologies', '0019CF': 'SALICRU, S.A.', '0019FC': 'PT. Ufoakses Sukses Luarbiasa', '0019F4': 'Convergens Oy Ltd', '001996': 'TurboChef Technologies Inc.', '001997': 'Soft Device Sdn Bhd', '001998': 'SATO CORPORATION', '00199C': 'CTRING', '001946': 'Cianet Industria e Comercio S/A', '001949': 'TENTEL COMTECH CO., LTD.', '001944': 'Fossil Partners, L.P.', '001971': 'Guangzhou Unicomp Technology Co.,Ltd', '001964': 'Doorking Inc.', '001976': 'Xipher Technologies, LLC', '00196C': 'ETROVISION TECHNOLOGY', '001967': 'TELDAT Sp.J.', '001952': 'ACOGITO Co., Ltd', '00198B': 'Novera Optics Korea, Inc.', '001961': 'Blaupunkt Embedded Systems GmbH', '001942': 'ON SOFTWARE INTERNATIONAL LIMITED', '00193F': 'RDI technology(Shenzhen) Co.,LTD', '001941': 'Pitney Bowes, Inc', '0018FD': 'Optimal Technologies International Inc.', '0018F0': 'JOYTOTO Co., Ltd.', '0018E9': 'Numata Corporation', '001908': 'Duaxes Corporation', '00190C': 'Encore Electronics, Inc.', '001919': 'ASTEL Inc.', '00192D': 'Nokia Corporation', '0018EF': 'Escape Communications, Inc.', '0018E6': 'Computer Hardware Design SIA', '001930': 'Cisco Systems, Inc', '001927': 'ImCoSys Ltd', '0018F7': 'Kameleon Technologies', '001885': 'Avigilon Corporation', '001888': 'GOTIVE a.s.', '00188A': 'Infinova LLC', '001886': 'EL-TECH, INC.', '001887': 'Metasystem SpA', '0018BE': 'ANSA Corporation', '0018BA': 'Cisco Systems, Inc', '0018B4': 'Dawon Media Inc.', '0018B6': 'S3C, Inc.', '0018A3': 'ZIPPY TECHNOLOGY CORP.', '0018A0': 'Cierma Ascenseurs', '001893': 'SHENZHEN PHOTON BROADBAND TECHNOLOGY CO.,LTD', '0018B1': 'IBM Corp', '00187B': '4NSYS Co. Ltd.', '00187F': 'ZODIANET', '00187E': 'RGB Spectrum', '00189D': 'Navcast Inc.', '0018D6': 'Swirlnet A/S', '0018CD': 'Erae Electronics Industry Co., Ltd', '0018DB': 'EPL Technology Ltd', '0018C8': 'ISONAS Inc.', '001849': 'nVent, Schroff GmbH', '001846': 'Crypto S.A.', '001845': 'Pulsar-Telecom LLC.', '00181E': 'GDX Technologies Ltd.', '00181C': 'Exterity Limited', '001863': 'Veritech Electronics Limited', '00185A': 'uControl, Inc.', '001852': 'StorLink Semiconductors, Inc.', '001850': 'Secfone Kft', '001858': 'TagMaster AB', '001824': 'Kimaldi Electronics, S.L.', '00183D': 'Vertex Link Corporation', '001825': 'Private', '001879': 'dSys', '001803': 'ArcSoft Shanghai Co. 
LTD', '0017EF': 'IBM Corp', '0017F5': 'LIG NEOPTEK', '0017CD': 'CEC Wireless R&D Ltd.', '0017D0': 'Opticom Communications, LLC', '0017C6': 'Cross Match Technologies Inc', '0017FE': 'TALOS SYSTEM INC.', '0017F8': 'Motech Industries Inc.', '0017C3': 'KTF Technologies Inc.', '0017B7': 'Tonze Technology Co.', '0017AB': 'Nintendo Co., Ltd.', '001807': 'Fanstel Corp.', '001808': 'SightLogix, Inc.', '0017CE': 'Screen Service Spa', '0017DB': 'CANKO TECHNOLOGIES INC.', '0017D6': 'Bluechips Microhouse Co.,Ltd.', '001787': 'Brother, Brother & Sons ApS', '001789': 'Zenitron Corporation', '001760': 'Naito Densei Machida MFG.CO.,LTD', '001761': 'Private', '001768': 'Zinwave Ltd', '001769': 'Cymphonix Corp', '001762': 'Solar Technology, Inc.', '00178F': 'NINGBO YIDONG ELECTRONIC CO.,LTD.', '001794': 'Cisco Systems, Inc', '00178A': 'DARTS TECHNOLOGIES CORP.', '001734': 'ADC Telecommunications', '00172E': 'FXC Inc.', '00172B': 'Global Technologies Inc.', '001772': 'ASTRO Strobel Kommunikationssysteme GmbH', '00173E': 'LeucotronEquipamentos Ltda.', '001798': 'Azonic Technology Co., LTD', '001747': 'Trimble', '00177A': 'ASSA ABLOY AB', '0016F4': 'Eidicom Co., Ltd.', '0016E7': 'Dynamix Promotions Limited', '0016E5': 'FORDLEY DEVELOPMENT LIMITED', '0016E6': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '001720': 'Image Sensing Systems, Inc.', '00171A': 'Winegard Company', '0016C8': 'Cisco Systems, Inc', '0016C4': 'SiRF Technology, Inc.', '0016F3': 'CAST Information Co., Ltd', '0016F5': 'Dalian Golden Hualu Digital Technology Co.,Ltd', '0016F1': 'OmniSense, LLC', '0016DD': 'Gigabeam Corporation', '00171C': 'NT MicroSystems, Inc.', '001716': 'Qno Technology Inc.', '001726': 'm2c Electronic Technology Ltd.', '001721': 'FITRE S.p.A.', '0016F9': 'CETRTA POT, d.o.o., Kranj', '00170A': 'INEW DIGITAL COMPANY', '0016BD': 'ATI Industrial Automation', '0016C0': 'Semtech Corporation', '0016C2': 'Avtec Systems Inc', '0016BA': 'WEATHERNEWS INC.', '0016B2': 'DriveCam Inc', '0016B3': 'Photonicbridges (China) Co., Ltd.', '0016AD': 'BT-Links Company Limited', '00166E': 'Arbitron Inc.', '0016AF': 'Shenzhen Union Networks Equipment Co.,Ltd.', '00169E': 'TV One Ltd', '00166A': 'TPS', '001663': 'KBT Mobile', '00169F': 'Vimtron Electronics Co., Ltd.', '00169A': 'Quadrics Ltd', '001692': 'Scientific-Atlanta, Inc.', '001691': 'Moser-Baer AG', '001688': 'ServerEngines LLC', '00168B': 'Paralan Corporation', '001682': 'Pro Dex, Inc', '00160A': 'SWEEX Europe BV', '001602': 'CEYON TECHNOLOGY CO.,LTD.', '001600': 'CelleBrite Mobile Synchronization', '0015F4': 'Eventide', '001629': 'Nivus GmbH', '001621': 'Colorado Vnet', '00161A': 'Dametric AB', '001615': 'Nittan Company, Limited', '001616': 'BROWAN COMMUNICATION INC.', '001617': 'MSI', '00162E': 'Space Shuttle Hi-Tech Co., Ltd.', '00162A': 'Antik computers & communications s.r.o.', '001656': 'Nintendo Co., Ltd.', '001651': 'Exeo Systems', '001610': 'Carina Technology', '001606': 'Ideal Industries', '001607': 'Curves International Inc.', '001654': 'Flex-P Industries Sdn. 
Bhd.', '001631': 'Xteam', '00164B': 'Quorion Data Systems GmbH', '001594': 'BIXOLON CO.,LTD', '001590': 'Hectronic GmbH', '00158C': 'Liab ApS', '00158F': 'NTT Advanced Technology Corporation', '0015ED': 'Fulcrum Microsystems, Inc.', '0015F0': 'EGO BV', '0015EE': 'Omnex Control Systems', '0015C8': 'FlexiPanel Ltd', '0015C0': 'DIGITAL TELEMEDIA CO.,LTD.', '0015C2': '3M Germany', '001588': 'Salutica Allied Solutions Sdn Bhd', '001583': 'IVT corporation', '001585': 'Aonvision Technolopy Corp.', '0015A5': 'DCI Co., Ltd.', '0015B2': 'Advanced Industrial Computer, Inc.', '0015DA': 'IRITEL A.D.', '00154A': 'WANSHIH ELECTRONIC CO., LTD', '00154C': 'Saunders Electronics', '00154D': 'Netronome Systems, Inc.', '001549': 'Dixtal Biomedica Ind. Com. Ltda', '00153D': 'ELIM PRODUCT CO.', '001544': 'coM.s.a.t. AG', '001539': 'Technodrive srl', '001531': 'KOCOM', '001535': 'OTE Spa', '001536': 'Powertech co.,Ltd', '00152B': 'Cisco Systems, Inc', '00152C': 'Cisco Systems, Inc', '001528': 'Beacon Medical Products LLC d.b.a. BeaconMedaes', '001527': 'Balboa Instruments', '001521': 'Horoquartz', '001520': 'Radiocrafts AS', '001566': 'A-First Technology Co., Ltd.', '00156B': 'Perfisans Networks Corp.', '001547': 'AiZen Solutions Inc.', '001579': 'Lunatone Industrielle Elektronik GmbH', '0014D7': 'Datastore Technology Corp', '0014DD': 'Covergence Inc.', '0014D4': 'K Technology Corporation', '0014CF': 'INVISIO Communications', '0014BE': 'Wink communication technology CO.LTD', '001511': 'Data Center Systems', '00150E': 'OPENBRAIN TECHNOLOGIES CO., LTD.', '00150D': 'Hoana Medical, Inc.', '00151C': 'LENECO', '001519': 'StoreAge Networking Technologies', '001506': 'Neo Photonics', '001504': 'GAME PLUS CO., LTD.', '001505': 'Actiontec Electronics, Inc', '0014FE': 'Artech Electronics', '0014DE': 'Sage Instruments Inc.', '0014DF': 'HI-P Tech Corporation', '0014E6': 'AIM Infrarotmodule GmbH', '0014F3': 'ViXS Systems Inc', '00147E': 'InnerWireless', '00147D': 'Aeon Digital International', '001476': 'MultiCom Industries Limited', '001473': 'Bookham Inc', '001489': 'B15402100 - JANDEI, S.L.', '001480': 'Hitachi-LG Data Storage Korea, Inc', '0014B6': 'Enswer Technology Inc.', '0014B2': 'mCubelogics Corporation', '0014AE': 'Wizlogics Co., Ltd.', '0014A6': 'Teranetics, Inc.', '001469': 'Cisco Systems, Inc', '0014BA': 'Carvers SA de CV', '00148A': 'Elin Ebg Traction Gmbh', '001491': 'Daniels Electronics Ltd. dbo Codan Rado Communications', '00146E': 'H. Stoll GmbH & Co. KG', '0014AA': 'Ashly Audio, Inc.', '001409': 'MAGNETI MARELLIS.E. S.p.A.', '00140B': 'FIRST INTERNATIONAL COMPUTER, INC.', '0013FD': 'Nokia Danmark A/S', '001400': 'MINERVA KOREA CO., LTD', '0013FC': 'SiCortex, Inc', '0013F6': 'Cintech', '00144F': 'Oracle Corporation ', '001456': 'Edge Products', '001450': 'Heim Systems GmbH', '001452': 'CALCULEX,INC.', '001442': 'ATTO CORPORATION', '001447': 'BOAZ Inc.', '00143E': 'AirLink Communications, Inc.', '00145D': 'WJ Communications, Inc.', '00143B': 'Sensovation AG', '00142D': 'Toradex AG', '001429': 'V Center Technologies Co., Ltd.', '001414': 'Jumpnode Systems LLC.', '00141E': 'P.A. 
Semi, Inc.', '0013CB': 'Zenitel Norway AS', '0013CF': '4Access Communications', '0013BE': 'Virtual Conexions', '0013B9': 'BM SPA', '0013AB': 'Telemotive AG', '0013C9': 'Beyond Achieve Enterprises Ltd.', '0013C6': 'OpenGear, Inc', '0013F3': 'Giga-byte Communications Inc.', '0013F4': 'Psitek (Pty) Ltd', '0013AC': 'Sunmyung Electronics Co., LTD', '0013A8': 'Tanisys Technology', '0013DA': 'Diskware Co., Ltd', '0013D8': 'Princeton Instruments', '001399': 'STAC Corporation.', '0013E9': 'VeriWave, Inc.', '001395': 'congatec AG', '001356': 'FLIR Radiation Inc', '00135A': 'Project T&E Limited', '001361': 'Biospace Co., Ltd.', '001362': 'ShinHeung Precision Co., Ltd.', '00134F': 'Rapidus Wireless Networks Inc.', '001378': 'Qsan Technology, Inc.', '00137A': 'Netvox Technology Co., Ltd.', '001381': 'CHIPS & Systems, Inc.', '001379': 'PONDER INFORMATION INDUSTRIES LTD.', '00132C': 'MAZ Brandenburg GmbH', '001324': 'Schneider Electric Ultra Terminal', '001326': 'ECM Systems Ltd', '001327': 'Data Acquisitions limited', '001367': 'Narayon. Co., Ltd.', '00135C': 'OnSite Systems, Inc.', '00135F': 'Cisco Systems, Inc', '00133B': 'Speed Dragon Multimedia Limited', '001338': 'FRESENIUS-VIAL', '00132D': 'iWise Communications', '001374': 'Atheros Communications, Inc.', '001369': 'Honda Electron Co., LED.', '001342': 'Vision Research, Inc.', '001340': 'AD.EL s.r.l.', '00130C': 'HF System Corporation', '00130F': 'EGEMEN Bilgisayar Muh San ve Tic LTD STI', '001313': 'GuangZhou Post & Telecom Equipment ltd', '0012CB': 'CSS Inc.', '0012CE': 'Advanced Cybernetics Group', '0012CA': 'Mechatronic Brick Aps', '0012C7': 'SECURAY Technologies Ltd.Co.', '0012CD': 'ASEM SpA', '0012E8': 'Fraunhofer IMS', '0012DD': 'Shengqu Information Technology (Shanghai) Co., Ltd.', '00131D': 'Scanvaegt International A/S', '001318': 'DGSTATION Co., Ltd.', '00131A': 'Cisco Systems, Inc', '0012F4': 'Belco International Co.,Ltd.', '0012F5': 'Imarda New Zealand Limited', '0012E0': 'Codan Limited', '0012DE': 'Radio Components Sweden AB', '001301': 'IronGate S.L.', '0012F6': 'MDK CO.,LTD.', '0012F1': 'IFOTEC', '0012F8': 'WNI Resources, LLC', '00127C': 'SWEGON AB', '001278': 'International Bar Code', '00127A': 'Sanyu Industry Co.,Ltd.', '001268': 'IPS d.o.o.', '0012AC': 'ONTIMETEK INC.', '0012BC': 'Echolab LLC', '0012BD': 'Avantec Manufacturing Limited', '0012AB': 'WiLife, Inc.', '00129B': 'E2S Electronic Engineering Solutions, S.L.', '001298': 'MICO ELECTRIC(SHENZHEN) LIMITED', '001272': 'Redux Communications Ltd.', '00126A': 'OPTOELECTRONICS Co., Ltd.', '0012C0': 'HotLava Systems, Inc.', '0012B7': 'PTW Freiburg', '0012A1': 'BluePacket Communications Co., Ltd.', '001206': 'iQuest (NZ) Ltd', '001207': 'Head Strong International Limited', '001209': 'Fastrax Ltd', '00120B': 'Chinasys Technologies Limited', '001205': 'Terrasat Communications, Inc.', '001238': 'SetaBox Technology Co., Ltd.', '00123C': 'Second Rule LLC', '00123E': 'ERUNE technology Co., Ltd.', '001254': 'Spectra Technologies Holdings Company Ltd', '00124F': 'nVent', '001221': 'B.Braun Melsungen AG', '001212': 'PLUS Corporation', '001213': 'Metrohm AG', '001218': 'ARUZE Corporation', '001249': 'Delta Elettronica S.p.A.', '00124D': 'Inducon BV', '001266': 'Swisscom Hospitality Services SA', '00125E': 'CAEN', '00125D': 'CyberNet Inc.', '001223': 'Pixim', '00123A': 'Posystech Inc., Co.', '001234': 'Camille Bauer', '0011E3': 'Thomson, Inc.', '0011DC': 'Glunz & Jensen', '001196': 'Actuality Systems, Inc.', '001187': 'Category Solutions, Inc', '001190': 'Digital Design Corporation', '0011C4': 
'Terminales de Telecomunicacion Terrestre, S.L.', '0011CB': 'Jacobsons AB', '0011F7': 'Shenzhen Forward Industry Co., Ltd', '0011D3': 'NextGenTel Holding ASA', '0011AB': 'TRUSTABLE TECHNOLOGY CO.,LTD.', '0011A5': 'Fortuna Electronic Corp.', '00113C': 'Micronas GmbH', '001131': 'UNATECH. CO.,LTD', '001127': 'TASI, Inc', '00112A': 'Niko NV', '00112B': 'NetModule AG', '00116F': 'Netforyou Co., LTD.', '001171': 'DEXTER Communications, Inc.', '001167': 'Integrated System Solution Corp.', '00116D': 'American Time and Signal', '001160': 'ARTDIO Company Co., LTD', '001154': 'Webpro Technologies Inc.', '001145': 'ValuePoint Networks', '001151': 'Mykotronx', '00114E': '690885 Ontario Inc.', '00112D': 'iPulse Systems', '00117B': 'Büchi Labortechnik AG', '000FEE': 'XTec, Incorporated', '000FE4': 'Pantech Co.,Ltd', '000FE7': 'Lutron Electronics Co., Inc.', '000FE6': 'MBTech Systems, Inc.', '000FDA': 'YAZAKI CORPORATION', '000FEF': 'Thales e-Transactions GmbH', '000FE9': 'GW TECHNOLOGIES CO.,LTD.', '000FE1': 'ID DIGITAL CORPORATION', '000FDF': 'SOLOMON Technology Corp.', '000FFE': 'G-PRO COMPUTER', '00110B': 'Franklin Technology Systems', '001105': 'Sunplus Technology Co., Ltd.', '001102': 'Aurora Multimedia Corp.', '001123': 'Appointech, Inc.', '00110F': 'netplat,Inc.', '000FD6': 'Sarotech Co., Ltd', '001115': 'EPIN Technologies, Inc.', '000FC7': 'Dionica R&D Ltd.', '000F64': 'D&R Electronica Weesp BV', '000F75': 'First Silicon Solutions', '000F7C': 'ACTi Corporation', '000F7A': 'BeiJing NuQX Technology CO.,LTD', '000FAD': 'FMN communications GmbH', '000FAB': 'Kyushu Electronics Systems Inc.', '000FAC': 'IEEE 802.11', '000F72': 'Sandburst', '000FB3': 'Actiontec Electronics, Inc', '000F9C': 'Panduit Corp', '000FC8': 'Chantry Networks', '000FBB': 'Nokia Siemens Networks GmbH & Co. KG.', '000FBC': 'Onkey Technologies, Inc.', '000FB6': 'Europlex Technologies', '000FB9': 'Adaptive Instruments', '000F98': 'Avamax Co. Ltd.', '000F97': 'Avanex Corporation', '000F96': 'Telco Systems, Inc. ', '000F8B': 'Orion MultiSystems Inc', '000F8C': 'Gigawavetech Pte Ltd', '000F38': 'Netstar', '000F3A': 'HISHARP', '000F30': 'Raza Microelectronics Inc', '000F53': 'Solarflare Communications Inc.', '000F51': 'Azul Systems, Inc.', '000F07': 'Mangrove Systems, Inc.', '000F00': 'Legra Systems, Inc.', '000F01': 'DIGITALKS INC', '000F03': 'COM&C CO., LTD', '000F34': 'Cisco Systems, Inc', '000F2E': 'Megapower International Corp.', '000F26': 'WorldAccxx LLC', '000F45': 'Stretch, Inc.', '000F3B': 'Fuji System Machines Co., Ltd.', '000F37': 'Xambala Incorporated', '000F54': 'Entrelogic Corporation', '000F25': 'AimValley B.V.', '000F19': 'Boston Scientific', '000F10': 'RDM Corporation', '000EA8': 'United Technologists Europe Limited', '000EAA': 'Scalent Systems, Inc.', '000EAC': 'MINTRON ENTERPRISE CO., LTD.', '000EAE': 'GAWELL TECHNOLOGIES CORP.', '000EBA': 'HANMI SEMICONDUCTOR CO., LTD.', '000EBC': 'Paragon Fidelity GmbH', '000EC5': 'Digital Multitools Inc', '000EC7': 'Motorola Korea', '000E8B': 'Astarte Technology Co, Ltd.', '000EDF': 'PLX Technology', '000EE1': 'ExtremeSpeed Inc.', '000ED7': 'Cisco Systems, Inc', '000ED3': 'Epicenter, Inc.', '000EA4': 'Certance Inc.', '000E9D': 'Tiscali UK Ltd', '000E47': 'NCI System Co.,Ltd.', '000E46': 'Niigata Seimitsu Co.,Ltd.', '000E1F': 'TCL Networks Equipment Co., Ltd.', '000E26': 'Gincom Technology Corp.', '000E6C': 'Device Drivers Limited', '000E6B': 'Janitza electronics GmbH', '000E5F': 'activ-net GmbH & Co. 
KG', '000E57': 'Iworld Networking, Inc.', '000E51': 'tecna elettronica srl', '000E67': 'Eltis Microelectronics Ltd.', '000E65': 'TransCore', '000E43': 'G-Tek Electronics Sdn. Bhd.', '000E44': 'Digital 5, Inc.', '000E74': 'Solar Telecom. Tech', '000E7A': 'GemWon Communications Co., Ltd.', '000E80': 'Thomson Technology Inc', '000E85': 'Catalyst Enterprises, Inc.', '000E32': 'Kontron Medical', '000E0B': 'Netac Technology Co., Ltd.', '000E11': 'BDT Büro und Datentechnik GmbH & Co.KG ', '000DF5': 'Teletronics International Inc.', '000DF7': 'Space Dynamics Lab', '000DEB': 'CompXs Limited', '000DEE': 'Andrew RF Power Amplifier Group', '000DEF': 'Soc. Coop. Bilanciai', '000DE7': 'Snap-on OEM Group', '000DC6': 'DigiRose Technology Co., Ltd.', '000DC1': 'SafeWeb Inc', '000DF0': 'QCOM TECHNOLOGY INC.', '000DE5': 'Samsung Thales', '000DC0': 'Spagat AS', '000DDD': 'Profilo Telra Elektronik Sanayi ve Ticaret. A.Ş', '000DF8': 'ORGA Kartensysteme GmbH', '000E16': 'SouthWing S.L.', '000DA7': 'Private', '000DAD': 'Dataprobe, Inc.', '000DA9': 'T.E.A.M. S.L.', '000DAB': 'Parker Hannifin GmbH Electromechanical Division Europe', '000DA8': 'Teletronics Technology Corporation', '000D79': 'Dynamic Solutions Co,.Ltd.', '000D6D': 'K-Tech Devices Corp.', '000DB4': 'NETASQ', '000DAF': 'Plexus Corp (UK) Ltd', '000DB1': 'Japan Network Service Co., Ltd.', '000D83': 'Sanmina-SCI Hungary Ltd.', '000D7F': 'MIDAS COMMUNICATION TECHNOLOGIES PTE LTD ( Foreign Branch)', '000D75': 'Kobian Pte Ltd - Taiwan Branch', '000D78': 'Engineering & Security', '000D9E': 'TOKUDEN OHIZUMI SEISAKUSYO Co.,Ltd.', '000DA3': 'Emerging Technologies Limited', '000DA4': 'DOSCH & AMAND SYSTEMS AG', '000D9A': 'INFOTEC LTD', '000D6E': 'K-Patents Oy', '000D6A': 'Redwood Technologies LTD', '000DBA': 'Océ Document Technologies GmbH', '000DBD': 'Cisco Systems, Inc', '000DB5': 'GLOBALSAT TECHNOLOGY CORPORATION', '000D97': 'ABB Inc./Tropos', '000D96': 'Vtera Technology Inc.', '000D8B': 'T&D Corporation', '000D90': 'Factum Electronics AB', '000D85': 'Tapwave, Inc.', '000D82': 'PHSNET', '000D23': 'Smart Solution, Inc', '000D27': 'MICROPLEX Printware AG', '000D1B': 'Kyoto Electronics Manufacturing Co., Ltd.', '000D1D': 'HIGH-TEK HARNESS ENT. CO., LTD.', '000D02': 'NEC Platforms, Ltd.', '000D07': 'Calrec Audio Ltd', '000D40': 'Verint Loronix Video Solutions', '000D21': 'WISCORE Inc.', '000D09': 'Yuehua(Zhuhai) Electronic CO. LTD', '000D37': 'WIPLUG', '000D33': 'Prediwave Corp.', '000D2F': 'AIN Comm.Tech.Co., LTD', '000D5D': 'Raritan Computer, Inc', '000D63': 'DENT Instruments, Inc.', '000D66': 'Cisco Systems, Inc', '000CCD': 'IEC - TC57', '000CCB': 'Design Combus Ltd', '000CC9': 'ILWOO DATA & TECHNOLOGY CO.,LTD', '000CE9': 'BLOOMBERG L.P.', '000CEA': 'aphona Kommunikationssysteme', '000CDA': 'FreeHand Systems, Inc.', '000CF5': 'InfoExpress', '000CEE': 'jp-embedded', '000CAF': 'TRI TERM CO.,LTD.', '000CB3': 'ROUND Co.,Ltd.', '000CDD': 'AOS technologies AG', '000CDF': 'PULNiX America, Inc', '000CA5': 'Naman NZ LTd', '000CA9': 'Ebtron Inc.', '000CFB': 'Korea Network Systems', '000C7A': 'DaTARIUS Technologies GmbH', '000C79': 'Extel Communications P/L', '000C75': 'Oriental integrated electronics. 
LTD', '000C5D': 'CHIC TECHNOLOGY (CHINA) CORP.', '000C4F': 'UDTech Japan Corporation', '000C62': 'ABB AB, Cewe-Control ', '000C4C': 'Arcor AG&Co.', '000C47': 'SK Teletech(R&D Planning Team)', '000C6D': 'Edwards Ltd.', '000C70': 'ACC GmbH', '000C6A': 'MBARI', '000C6B': 'Kurz Industrie-Elektronik GmbH', '000C9D': 'UbeeAirWalk, Inc.', '000C9F': 'NKE Corporation', '000C84': 'Eazix, Inc.', '000C85': 'Cisco Systems, Inc', '000C3F': 'Cogent Defence & Security Networks,', '000C30': 'Cisco Systems, Inc', '000C9A': 'Hitech Electronics Corp.', '000C91': 'Riverhead Networks Inc.', '000BDA': 'EyeCross Co.,Inc.', '000BD6': 'Paxton Access Ltd', '000BD4': 'Beijing Wise Technology & Science Development Co.Ltd', '000C1F': 'Glimmerglass Networks', '000C20': 'Fi WIn, Inc.', '000C15': 'CyberPower Systems, Inc.', '000BF8': 'Infinera', '000BFF': 'Berkeley Camera Engineering', '000C16': 'Concorde Microsystems Inc.', '000C09': 'Hitachi IE Systems Co., Ltd', '000BE7': 'COMFLUX TECHNOLOGY INC.', '000BEC': 'NIPPON ELECTRIC INSTRUMENT, INC.', '000C02': 'ABB Oy', '000B71': 'Litchfield Communications Inc.', '000B74': 'Kingwave Technology Co., Ltd.', '000B73': 'Kodeos Communications', '000B76': 'ET&T Technology Co. Ltd.', '000B5E': 'Audio Engineering Society Inc.', '000B60': 'Cisco Systems, Inc', '000B65': 'Sy.A.C. srl', '000B5F': 'Cisco Systems, Inc', '000B61': 'Friedrich Lütze GmbH & Co. KG', '000B59': 'ScriptPro, LLC', '000B5C': 'Newtech Co.,Ltd', '000B7E': 'SAGINOMIYA Seisakusho Inc.', '000B80': 'Lycium Networks', '000BA7': 'Maranti Networks', '000BAA': 'Aiphone co.,Ltd', '000BA4': 'Shiron Satellite Communications Ltd. (1996)', '000BD0': 'XiMeta Technology Americas Inc.', '000BC5': 'SMC Networks, Inc.', '000BC6': 'ISAC, Inc.', '000BC1': 'Bay Microsystems, Inc.', '000B8B': 'KERAJET, S.A.', '000B89': 'Top Global Technology, Ltd.', '000B99': 'SensAble Technologies, Inc.', '000B9C': 'TriBeam Technologies, Inc.', '000B7C': 'Telex Communications', '000B83': 'DATAWATT B.V.', '000B20': 'Hirata corporation', '000B22': 'Environmental Systems and Services', '000B1B': 'Systronix, Inc.', '000B03': 'Taekwang Industrial Co., Ltd', '000B01': 'DAIICHI ELECTRONICS CO., LTD.', '000B3E': 'BittWare, Inc', '000B29': 'LS(LG) Industrial Systems co.,Ltd', '000B39': 'Keisoku Giken Co.,Ltd.', '000B3A': 'QuStream Corporation', '000B33': 'Vivato Technologies', '000B05': 'Pacific Broadband Networks', '000B00': 'FUJIAN START COMPUTER EQUIPMENT CO.,LTD', '000B5B': 'Rincon Research Corporation', '000AF6': 'Emerson Climate Technologies Retail Solutions, Inc.', '000B11': 'HIMEJI ABC TRADING CO.,LTD.', '000B41': 'Ing. Büro Dr. Beutlhauser', '000AEA': 'ADAM ELEKTRONIK LTD. ŞTI', '000AE3': 'YANG MEI TECHNOLOGY CO., LTD', '000ADC': 'RuggedCom Inc.', '000AB7': 'Cisco Systems, Inc', '000AAD': 'Stargames Corporation', '000AB1': 'GENETEC Corporation', '000AB9': 'Astera Technologies Corp.', '000A90': 'Bayside Interactive, Inc.', '000A9D': 'King Young Technology Co. Ltd.', '000AA1': 'V V S Limited', '000AA4': 'SHANGHAI SURVEILLANCE TECHNOLOGY CO,LTD', '000A9E': 'BroadWeb Corportation', '000AE0': 'Fujitsu Softek', '000A8B': 'Cisco Systems, Inc', '000A88': 'InCypher S.A.', '000ABB': 'Taiwan Secom Co,. 
Ltd', '000A7C': 'Tecton Ltd', '000A6E': 'Harmonic, Inc', '000A6D': 'EKS Elektronikservice GmbH', '000A43': 'Chunghwa Telecom Co., Ltd.', '000A45': 'Audio-Technica Corp.', '000A35': 'Xilinx', '000A3B': 'GCT Semiconductor, Inc', '000A74': 'Manticom Networks Inc.', '000A6F': 'ZyFLEX Technologies Inc', '000A63': 'DHD GmbH', '000A2E': 'MAPLE NETWORKS CO., LTD', '000A2D': 'Cabot Communications Limited', '000A25': 'CERAGON NETWORKS', '000A21': 'Integra Telecom Co. Ltd', '000A1E': 'Red-M Products Limited', '000A4B': 'DataPower Technology, Inc.', '000A33': 'Emulex Corporation', '000A67': 'OngCorp', '0009F9': 'ART JAPAN CO., LTD.', '0009FC': 'IPFLEX Inc.', '0009FD': 'Ubinetics Limited', '0009F7': 'SED, a division of Calian', '0009E2': 'Sinbon Electronics Co., Ltd.', '0009DA': 'Control Module Inc.', '0009D7': 'DC Security Products', '0009D8': 'Fält Communications AB', '000A14': 'TECO a.s.', '000A0B': 'Sealevel Systems, Inc.', '000A10': 'FAST media integrations AG', '0009DB': 'eSpace', '0009D5': 'Signal Communication, Inc.', '0009D3': 'Western DataCom Co., Inc.', '0009BC': 'Utility, Inc', '0009BE': 'Mamiya-OP Co.,Ltd.', '0009E6': 'Cyber Switching Inc.', '0009FB': 'Philips Patient Monitoring', '00098F': 'Cetacean Networks', '000987': 'NISHI NIPPON ELECTRIC WIRE & CABLE CO.,LTD.', '000989': 'VividLogic Inc.', '000986': 'Metalink LTD.', '00098C': 'Option Wireless Sweden', '000985': 'Auto Telecom Company', '00098D': 'Velocity Semiconductor', '000981': 'Newport Networks', '000955': 'Young Generation International Corp.', '00094A': 'Homenet Communications', '00094B': 'FillFactory NV', '00094D': 'Braintree Communications Pty Ltd', '000950': 'Independent Storage Corporation', '000954': 'AMiT spol. s. r. o.', '000992': 'InterEpoch Technology,INC.', '000995': 'Castle Technology Ltd', '000998': 'Capinfo Company Limited', '0009B6': 'Cisco Systems, Inc', '0009B3': 'MCM Systems Ltd', '00095C': 'Philips Medical Systems - Cardiac and Monitoring Systems (CM', '000958': 'INTELNET S.A.', '0009A3': 'Leadfly Techologies Corp. Ltd.', '0009A5': 'HANSUNG ELETRONIC INDUSTRIES DEVELOPMENT CO., LTD', '000962': 'Sonitor Technologies AS', '00095D': 'Dialogue Technology Corp.', '00099A': 'ELMO COMPANY, LIMITED', '00099C': 'Naval Research Laboratory', '000984': 'MyCasa Network Inc.', '00092B': 'iQstor Networks, Inc.', '000926': 'YODA COMMUNICATIONS, INC.', '000927': 'TOYOKEIKI CO.,LTD.', '000923': 'Heaman System Co., Ltd', '00091D': 'Proteam Computer Corporation', '0008EB': 'ROMWin Co.,Ltd.', '0008E8': 'Excel Master Ltd.', '0008DC': 'Wiznet', '0008DD': 'Telena Communications, Inc.', '0008E1': 'Barix AG', '000909': 'Telenor Connect A/S', '00090A': 'SnedFar Technology Co., Ltd.', '00090F': 'Fortinet Inc.', '00092C': 'Hitpoint Inc.', '000903': 'Panasas, Inc', '000907': 'Chrysalis Development', '000906': 'Esteem Networks', '000952': 'Auerswald GmbH & Co. 
KG', '000944': 'Cisco Systems, Inc', '0008FB': 'SonoSite, Inc.', '0008EE': 'Logic Product Development', '000917': 'WEM Technology Inc', '000881': 'DIGITAL HANDS CO.,LTD.', '000882': 'SIGMA CORPORATION', '000873': 'DapTechnology B.V.', '00087A': 'Wipotec GmbH', '000871': 'NORTHDATA Co., Ltd.', '0008B2': 'SHENZHEN COMPASS TECHNOLOGY DEVELOPMENT CO.,LTD', '0008B1': 'ProQuent Systems', '0008AF': 'Novatec Corporation', '0008A6': 'Multiware & Image Co., Ltd.', '00087E': 'Bon Electro-Telecom Inc.', '000880': 'BroadTel Canada Communications inc.', '00086D': 'Missouri FreeNet', '0008D4': 'IneoQuest Technologies, Inc', '0008D6': 'HASSNET Inc.', '0008CE': 'IPMobileNet Inc.', '0008C2': 'Cisco Systems, Inc', '0008C0': 'ASA SYSTEMS', '0008A3': 'Cisco Systems, Inc', '00089E': 'Beijing Enter-Net co.LTD', '0008B4': 'SYSPOL', '0008B3': 'Fastwel', '00088E': 'Nihon Computer Co., Ltd.', '0008DA': 'SofaWare Technologies Ltd.', '0007D5': '3e Technologies Int;., Inc.', '0007DB': 'Kirana Networks, Inc.', '00086A': 'Securiton Gmbh', '000863': 'Entrisphere Inc.', '000866': 'DSX Access Systems, Inc.', '0007EF': 'Lockheed Martin Tactical Systems', '0007F4': 'Eletex Co., Ltd.', '0007DD': 'Cradle Technologies', '00081B': 'Windigo Systems', '0007FF': 'Gluon Networks', '0007F7': 'Galtronics', '00085F': 'Picanol N.V.', '000852': 'Davolink Co. Inc.', '00080C': 'VDA Elettronica spa', '000804': 'ICA Inc.', '000857': 'Polaris Networks, Inc.', '00078B': 'Wegener Communications, Inc.', '000783': 'SynCom Network, Inc.', '000787': 'Idea System Co., Ltd.', '000789': 'Allradio Co., Ltd', '0007B9': 'Ginganet Corporation', '00047F': 'Chr. Mayr GmbH & Co. KG', '00047B': 'Schlumberger', '0007B4': 'Cisco Systems, Inc', '0007B3': 'Cisco Systems, Inc', '0007B5': 'Any One Wireless Ltd.', '000785': 'Cisco Systems, Inc', '000775': 'Valence Semiconductor, Inc.', '000793': 'Shin Satellite Public Company Limited', '000796': 'LSI Systems, Inc.', '00077F': 'J Communications Co., Ltd.', '000771': 'Embedded System Corporation', '000770': 'Ubiquoss Inc', '0007D1': 'Spectrum Signal Processing Inc.', '0007A2': 'Opteon Corporation', '0006F8': 'The Boeing Company', '0006FB': 'Hitachi Printing Solutions, Ltd.', '0006FC': 'Fnet Co., Ltd.', '0006F4': 'Prime Electronics & Satellitics Inc.', '000738': 'Young Technology Co., Ltd.', '000729': 'Kistler Instrumente AG', '00072A': 'Innovance Networks', '00074A': 'Carl Valentin GmbH', '00073C': 'Telecom Design', '000736': 'Data Video Technologies Co., Ltd.', '000750': 'Cisco Systems, Inc', '000742': 'Ormazabal', '00074B': 'Daihen Corporation', '000764': 'YoungWoo Telecom Co. 
Ltd.', '000766': 'Chou Chin Industrial Co., Ltd.', '000761': '29530', '000755': 'Lafon', '000759': 'Boris Manufacturing Corp.', '000751': 'm-u-t AG', '000719': 'Mobiis Co., Ltd.', '00070D': 'Cisco Systems, Inc', '00070E': 'Cisco Systems, Inc', '00070B': 'Novabase SGPS, SA', '000710': 'Adax, Inc.', '0006E8': 'Optical Network Testing, Inc.', '0006EE': 'Shenyang Neu-era Information & Technology Stock Co., Ltd', '0006E2': 'Ceemax Technology Co., Ltd.', '0006A5': 'PINON Corp.', '00069D': 'Petards Ltd', '0006A8': 'KC Technology, Inc.', '0006A0': 'Mx Imaging', '0006BE': 'Baumer Optronic GmbH', '0006BA': 'Westwave Communications', '0006C4': 'Piolink Inc.', '0006B5': 'Source Photonics, Inc.', '00068F': 'Telemonitor, Inc.', '0006DA': 'ITRAN Communications Ltd.', '0006D0': 'Elgar Electronics Corp.', '0006CC': 'JMI Electronics Co., Ltd.', '0006BF': 'Accella Technologies Co., Ltd.', '000690': 'Euracom Communication GmbH', '0006E1': 'Techno Trade s.a', '00062E': 'Aristos Logic Corp.', '000624': 'Gentner Communications Corp.', '000625': 'The Linksys Group, Inc.', '000627': 'Uniwide Technologies, Inc.', '00062A': 'Cisco Systems, Inc', '00062C': 'Bivio Networks', '000621': 'Hinox, Co., Ltd.', '00064E': 'Broad Net Technology Inc.', '00062D': 'TouchStar Technologies, L.L.C.', '000646': 'ShenZhen XunBao Network Technology Co Ltd', '00064B': 'Alexon Co., Ltd.', '00063C': 'Intrinsyc Software International Inc.', '000630': 'Adtranz Sweden', '000637': 'Toptrend-Meta Information (ShenZhen) Inc.', '00061C': 'Hoshino Metal Industries, Ltd.', '000623': 'MGE UPS Systems France', '00060B': 'Artesyn Embedded Technologies', '00067E': 'WinCom Systems, Inc.', '000677': 'SICK AG', '000666': 'Roving Networks', '000667': 'Tripp Lite', '00064D': 'Sencore', '000660': 'NADEX Co., Ltd.', '000679': 'Konami Corporation', '00066C': 'Robinson Corporation', '000615': 'Kimoto Electric Co., Ltd.', '00060A': 'Blue2space', '0005CE': 'Prolink Microsystems Corporation', '0005C2': 'Soronti, Inc.', '0005DF': 'Electronic Innovation, Inc.', '0005DE': 'Gi Fone Korea, Inc.', '0005E0': 'Empirix Corp.', '0005D8': 'Arescom, Inc.', '0005E4': 'Red Lion Controls Inc.', '0005F2': 'Power R, Inc.', '0005F3': 'Webyn', '000601': 'Otanikeiki Co., Ltd.', '000605': 'Inncom International, Inc.', '0005FA': 'IPOptical, Inc.', '0005E5': 'Renishaw PLC', '0005F5': 'Geospace Technologies', '0005FD': 'PacketLight Networks Ltd.', '0005D4': 'FutureSmart Networks, Inc.', '0005C4': 'Telect, Inc.', '0005A3': 'QEI, Inc.', '00059E': 'Zinwell Corporation', '0005A5': 'KOTT', '0005B3': 'Asahi-Engineering Co., Ltd.', '00059D': 'Daniel Computing Systems, Inc.', '0005A4': 'Lucid Voice Ltd.', '000563': 'J-Works, Inc.', '000557': 'Agile TV Corporation', '00055B': 'Charles Industries, Ltd.', '000554': 'Rangestar Wireless', '000553': 'DVC Company, Inc.', '000566': 'Secui.com Corporation', '00056C': 'Hung Chang Co., Ltd.', '00055F': 'Cisco Systems, Inc', '00055D': 'D-LINK SYSTEMS, INC.', '000561': 'nac Image Technology, Inc.', '000594': 'HMS Industrial Networks', '00056F': 'Innomedia Technologies Pvt. Ltd.', '000574': 'Cisco Systems, Inc', '000567': 'Etymonic Design, Inc.', '000565': 'Tailyn Communication Company Ltd.', '00058E': 'Flextronics International GmbH & Co. Nfg. 
KG', '000532': 'Cisco Systems, Inc', '000536': 'Danam Communications, Inc.', '000542': 'Otari, Inc.', '000537': 'Nets Technology Co., Ltd.', '00057C': 'RCO Security AB', '000583': 'ImageCom Limited', '00054B': 'Eaton Automation AG', '0004C8': 'LIBA Maschinenfabrik GmbH', '0004CC': 'Peek Traffic B.V.', '0004BF': 'VersaLogic Corp.', '0004C3': 'CASTOR Informatique', '0004F6': 'Amphus', '0004F4': 'Infinite Electronics Inc.', '0004F1': 'WhereNet', '000521': 'Control Microsystems', '000523': 'AVL List GmbH', '00051F': 'Taijin Media Co., Ltd.', '00050C': 'Network Photonics, Inc.', '0004EC': 'Memobox SA', '0004E4': 'Daeryung Ind., Inc.', '00050A': 'ICS Spa', '000511': 'Complementary Technologies Ltd', '000506': 'Reddo Networks AB', '0004E2': 'SMC Networks, Inc.', '0004CB': 'Tdsoft Communication, Ltd.', '000526': 'IPAS GmbH', '000467': 'Wuhan Research Institute of MII', '00045A': 'The Linksys Group, Inc.', '000463': 'Bosch Security Systems', '00045C': 'Mobiwave Pte Ltd', '000453': 'YottaYotta, Inc.', '000450': 'DMD Computers SRL', '000443': 'Agilent Technologies, Inc.', '000449': 'Mapletree Networks', '00042F': 'International Communications Products, Inc.', '000429': 'Pixord Corporation', '000426': 'Autosys', '0004B8': 'Kumahira Co., Ltd.', '0004B5': 'Equitrac Corporation', '0004B1': 'Signal Technology, Inc.', '000451': 'Medrad, Inc.', '000483': 'Deltron Technology, Inc.', '000441': 'Half Dome Systems, Inc.', '0004A5': 'Barco Projection Systems NV', '000482': 'Medialogic Corp.', '0003CA': 'MTS Systems Corp.', '0003C7': 'hopf Elektronik GmbH', '0003C2': 'Solphone K.K.', '0003F3': 'Dazzle Multimedia, Inc.', '0003EC': 'ICG Research, Inc.', '0003E9': 'Akara Canada, Inc.', '0003E5': 'Hermstedt SG', '0003E8': 'Wavelength Digital Limited', '000421': 'Ocular Networks', '000424': 'TMC s.r.l.', '00041D': 'Corega of America', '00041A': 'Ines Test and Measurement GmbH & CoKG', '00041E': 'Shikoku Instrumentation Co., Ltd.', '000413': 'snom technology GmbH', '0003B4': 'Macrotek International Corp.', '0003A6': 'Traxit Technology, Inc.', '0003A4': 'Imation Corp.', '0003AB': 'Bridge Information Systems', '000403': 'Nexsi Corporation', '000406': 'Fa. 
Metabox AG', '0003F8': 'SanCastle Technologies, Inc.', '0003FA': 'TiMetra Networks', '0003C6': 'ICUE Systems, Inc.', '0003BB': 'Signal Communications Limited', '0003BE': 'Netility', '0003DF': 'Desana Systems', '0003DA': 'Takamisawa Cybernetics Co., Ltd.', '0003D9': 'Secheron SA', '0003FB': 'ENEGATE Co.,Ltd.', '0003F6': 'Allegro Networks, Inc.', '000415': 'Rasteme Systems Co., Ltd.', '000398': 'WISI', '000395': 'California Amplifier', '000392': 'Hyundai Teletek Co., Ltd.', '00038E': 'Atoga Systems, Inc.', '00031A': 'Beijing Broad Telecom Ltd., China', '00035B': 'BridgeWave Communications', '000357': 'Intervoice-Brite, Inc.', '00037F': 'Atheros Communications, Inc.', '0002F0': 'AME Optimedia Technology Co., Ltd.', '00039E': 'Tera System Co., Ltd.', '000397': 'FireBrick Limited', '00033F': 'BigBand Networks, Ltd.', '000327': "ACT'L", '00032E': 'Scope Information Management, Ltd.', '00037C': 'Coax Media', '000368': 'Embedone Co., Ltd.', '000345': 'Routrek Networks Corporation', '0002C8': 'Technocom Communications Technology (pte) Ltd', '0002B8': 'WHI KONSULT AB', '0002A9': 'RACOM, s.r.o.', '0002BB': 'Continuous Computing Corp', '0002BC': 'LVL 7 Systems, Inc.', '00030F': 'Digital China (Shanghai) Networks Ltd.', '000311': 'Micro Technology Co., Ltd.', '00030D': 'Uniwill Computer Corp.', '000309': 'Texcel Technology PLC', '000303': 'JAMA Electronics Co., Ltd.', '000305': 'MSC Vertriebs GmbH', '0002FE': 'Viditec, Inc.', '00019F': 'ReadyNet', '0002FF': 'Handan BroadInfoCom', '0002F4': 'PCTEL, Inc.', '0002E9': 'CS Systemes De Securite - C3S', '0002E5': 'Timeware Ltd.', '0002E0': 'ETAS GmbH', '0002CE': 'FoxJet, Inc.', '0002C3': 'Arelnet Ltd.', '000316': 'Nobell Communications, Inc.', '000329': 'F3, Inc.', '000321': 'Reco Research Co., Ltd.', '0002F5': 'VIVE Synergies, Inc.', '0002D5': 'ACR', '0002AB': 'CTC Union Technologies Co., Ltd.', '0002A4': 'AddPac Technology Co., Ltd.', '0002A3': 'ABB Switzerland Ltd, Power Systems', '0002A0': 'Flatstack Ltd.', '0002B2': 'Cablevision', '0002B7': 'Watanabe Electric Industry Co., Ltd.', '0002AF': 'TeleCruz Technology, Inc.', '0002A8': 'Air Link Technology', '00026A': 'Cocess Telecom Co., Ltd.', '00026C': 'Philips CFT', '000262': 'Soyo Group Soyo Com Tech Co., Ltd', '000265': 'Virditech Co. Ltd.', '00025B': 'Cambridge Silicon Radio', '000256': 'Alpha Processor, Inc.', '000259': 'Tsann Kuen China (Shanghai)Enterprise Co., Ltd. IT Group', '000294': 'Tokyo Sokushin Co., Ltd.', '000296': 'Lectron Co,. 
Ltd.', '00028E': 'Rapid 5 Networks, Inc.', '00024F': 'IPM Datacom S.R.L.', '000271': 'Zhone Technologies', '00028A': 'Ambit Microsystems Corporation', '0001FA': 'HOROSCAS', '000282': 'ViaClix, Inc.', '000285': 'Riverstone Networks', '000279': 'Control Applications, Ltd.', '000251': 'Soma Networks, Inc.', '0001F5': 'ERIM S.A.', '0001FF': 'Data Direct Networks, Inc.', '0001FC': 'Keyence Corporation', '0001FD': 'Digital Voice Systems, Inc.', '000210': 'Fenecom', '00020B': 'Native Networks, Inc.', '000218': 'Advanced Scientific Corp', '0001EE': 'Comtrol Europe, Ltd.', '0001F0': 'Tridium, Inc.', '0001F1': 'Innovative Concepts, Inc.', '0001E2': 'Ando Electric Corporation', '00022F': 'P-Cube, Ltd.', '000227': 'ESD Electronic System Design GmbH', '00021D': 'Data General Communication Ltd.', '000219': 'Paralon Technologies', '000203': 'Woonsang Telecom, Inc.', '0001D3': 'PAXCOMM, Inc.', '0001E1': 'Kinpo Electronics, Inc.', '00022C': 'ABB Bomem, Inc.', '00023C': 'Creative Technology, Ltd.', '00306C': 'Hitex Holding GmbH', '00308B': 'Brix Networks', '000177': 'EDSL', '00014D': 'Shin Kin Enterprises Co., Ltd', '0001DA': 'WINCOMM Corporation', '0001D2': 'inXtron, Inc. ', '0001C6': 'Quarry Technologies', '00016E': 'Conklin Corporation', '000174': 'CyberOptics Corporation', '00015E': 'BEST TECHNOLOGY CO., LTD.', '000161': 'Meta Machine Technology', '0001A1': 'Mag-Tek, Inc.', '000186': 'Uwe Disch', '0001A6': 'Scientific-Atlanta Arcodan A/S', '000172': 'TechnoLand Co., LTD.', '0001A0': 'Infinilink Corporation', '000196': 'Cisco Systems, Inc', '000199': 'HeiSei Electronics', '00018B': 'NetLinks Co., Ltd.', '00018D': 'AudeSi Technologies', '00019D': 'E-Control Systems, Inc.', '0001CE': 'Custom Micro Products, Ltd.', '0001BB': 'Frequentis', '0001BC': 'Brains Corporation', '0001C0': 'CompuLab, Ltd.', '00017C': 'AG-E GmbH', '000108': 'AVLAB Technology, Inc.', '00062B': 'INTRASERVER TECHNOLOGY', '000100': "EQUIP'TRANS", '00B09D': 'Point Grey Research Inc.', '000110': 'Gotham Networks', '000112': 'Shark Multimedia Inc.', '000116': 'Netspect Technologies, Inc.', '00B06D': 'Jones Futurex Inc.', '00B094': 'Alaris, Inc.', '0030F0': 'Uniform Industrial Corp.', '00013B': 'BNA SYSTEMS', '000134': 'Selectron Systems AG', '000139': 'Point Multimedia Systems', '00013E': 'Ascom Tateco AB', '00012E': 'PC Partner Ltd.', '000132': 'Dranetz - BMI', '000113': 'OLYMPUS CORPORATION', '00011E': 'Precidia Technologies, Inc.', '000155': 'Promise Technology, Inc.', '003094': 'Cisco Systems, Inc', '00308A': 'NICOTRA SISTEMI S.P.A', '003072': 'Intellibyte Inc.', '003040': 'Cisco Systems, Inc', '003032': 'MagicRam, Inc.', '0030EA': 'TeraForce Technology Corporation', '00309B': 'Smartware', '003045': 'Village Networks, Inc. 
(VNI)', '0030E5': 'Amper Datos S.A.', '003006': 'SUPERPOWER COMPUTER', '003038': 'XCP, INC.', '003079': 'CQOS, INC.', '00300C': 'CONGRUENCY, LTD.', '00304C': 'APPIAN COMMUNICATIONS, INC.', '0030E8': 'ENSIM CORP.', '0030C9': 'LuxN, N', '003028': 'FASE Saldatura srl', '003069': 'IMPACCT TECHNOLOGY CORP.', '0030C3': 'FLUECKIGER ELEKTRONIK AG', '00305A': 'TELGEN CORPORATION', '003010': 'VISIONETICS INTERNATIONAL', '0030D9': 'DATACORE SOFTWARE CORP.', '003026': 'HeiTel Digital Video GmbH', '003077': 'ONPREM NETWORKS', '003047': 'NISSEI ELECTRIC CO., LTD.', '0030D4': 'AAE Systems, Inc.', '00D0D7': 'B2C2, INC.', '00D073': 'ACN ADVANCED COMMUNICATIONS', '00D057': 'ULTRAK, INC.', '0030AB': 'DELTA NETWORKS, INC.', '003049': 'BRYANT TECHNOLOGY, LTD.', '00306D': 'LUCENT TECHNOLOGIES', '003017': 'BlueArc UK Ltd', '00301C': 'ALTVATER AIRDATA SYSTEMS', '003080': 'Cisco Systems, Inc', '0030F7': 'RAMIX INC.', '0030D0': 'Tellabs', '003014': 'DIVIO, INC.', '003081': 'ALTOS C&C', '00D0F0': 'CONVISION TECHNOLOGY GMBH', '00D010': 'CONVERGENT NETWORKS, INC.', '00D04B': 'LA CIE GROUP S.A.', '00D00E': 'PLURIS, INC.', '00D012': 'GATEWORKS CORP.', '00D04D': 'DIV OF RESEARCH & STATISTICS', '00D02E': 'COMMUNICATION AUTOMATION CORP.', '00D0C5': 'COMPUTATIONAL SYSTEMS, INC.', '00D046': 'DOLBY LABORATORIES, INC.', '00D0DE': 'PHILIPS MULTIMEDIA NETWORK', '00D00C': 'SNIJDER MICRO SYSTEMS', '00D017': 'SYNTECH INFORMATION CO., LTD.', '00D036': 'TECHNOLOGY ATLANTA CORP.', '00D0E3': 'ELE-CHEM ENGINEERING CO., LTD.', '00D0B6': 'CRESCENT NETWORKS, INC.', '00D0C4': 'TERATECH CORPORATION', '00D061': 'TREMON ENTERPRISES CO., LTD.', '00D0E5': 'SOLIDUM SYSTEMS CORP.', '00D045': 'KVASER AB', '00D004': 'PENTACOM LTD.', '00D005': 'ZHS ZEITMANAGEMENTSYSTEME', '00D0D3': 'Cisco Systems, Inc', '00D026': 'HIRSCHMANN AUSTRIA GMBH', '00D0DA': 'TAICOM DATA SYSTEMS CO., LTD.', '00D03C': 'Vieo, Inc.', '00D0B4': 'KATSUJIMA CO., LTD.', '00D086': 'FOVEON, INC.', '00D0A8': 'NETWORK ENGINES, INC.', '00D0AB': 'DELTAKABEL TELECOM CV', '00D0E8': 'MAC SYSTEM CO., LTD.', '00D06B': 'SR TELECOM INC.', '00D0DC': 'MODULAR MINING SYSTEMS, INC.', '00D01E': 'PINGTEL CORP.', '00D0CA': 'Intrinsyc Software International Inc.', '00D065': 'TOKO ELECTRIC', '00D09A': 'FILANET CORPORATION', '00D0AE': 'ORESIS COMMUNICATIONS, INC.', '00D0F2': 'MONTEREY NETWORKS', '00D014': 'ROOT, INC.', '00D023': 'INFORTREND TECHNOLOGY, INC.', '00D0A2': 'INTEGRATED DEVICE', '00D034': 'ORMEC SYSTEMS CORP.', '00D08A': 'PHOTRON USA', '00D0A7': 'TOKYO SOKKI KENKYUJO CO., LTD.', '00D01D': 'FURUNO ELECTRIC CO., LTD.', '00504C': 'Galil Motion Control', '005076': 'IBM Corp', '0050D4': 'JOOHONG INFORMATION &', '0050A6': 'OPTRONICS', '005084': 'ATL PRODUCTS', '0050A9': 'MOLDAT WIRELESS TECHNOLGIES', '00509B': 'SWITCHCORE AB', '00507E': 'NEWER TECHNOLOGY', '0050CE': 'LG INTERNATIONAL CORP.', '0050F7': 'VENTURE MANUFACTURING (SINGAPORE) LTD.', '005019': 'SPRING TIDE NETWORKS, INC.', '0050FD': 'VISIONCOMM CO., LTD.', '0050BF': 'Metalligence Technology Corp.', '005036': 'NETCAM, LTD.', '0050DB': 'CONTEMPORARY CONTROL', '00507C': 'VIDEOCON AG', '005047': 'Private', '00D06C': 'SHAREWAVE, INC.', '0050A7': 'Cisco Systems, Inc', '005055': 'DOMS A/S', '005072': 'CORVIS CORPORATION', '00D0EE': 'DICTAPHONE CORPORATION', '00501B': 'ABL CANADA, INC.', '009057': 'AANetcom, Inc.', '009083': 'TURBO COMMUNICATION, INC.', '00903D': 'BIOPAC SYSTEMS, INC.', '0090D7': 'NetBoost Corp.', '005083': 'GILBARCO, INC.', '0050DC': 'TAS TELEFONBAU A. SCHWABE GMBH & CO. KG', '005008': 'TIVA MICROCOMPUTER CORP. 
(TMC)', '005052': 'TIARA NETWORKS, INC.', '005027': 'GENICOM CORPORATION', '00505A': 'NETWORK ALCHEMY, INC.', '005039': 'MARINER NETWORKS', '005064': 'CAE ELECTRONICS', '0050B8': 'INOVA COMPUTERS GMBH & CO. KG', '00505B': 'KAWASAKI LSI U.S.A., INC.', '0050CC': 'Seagate Cloud Systems Inc', '005016': 'Molex Canada Ltd', '00501F': 'MRG SYSTEMS, LTD.', '005043': 'MARVELL SEMICONDUCTOR, INC.', '005095': 'PERACOM NETWORKS', '0050FA': 'OXTEL, LTD.', '009038': 'FOUNTAIN TECHNOLOGIES, INC.', '0090B0': 'VADEM', '0090EF': 'INTEGRIX, INC.', '0090C5': 'INTERNET MAGIC, INC.', '00908C': 'ETREND ELECTRONICS, INC.', '009048': 'ZEAL CORPORATION', '0090B9': 'BERAN INSTRUMENTS LTD.', '0090C4': 'JAVELIN SYSTEMS, INC.', '0090A5': 'SPECTRA LOGIC', '0090A3': 'Corecess Inc.', '009082': 'FORCE INSTITUTE', '009000': 'DIAMOND MULTIMEDIA', '00906E': 'PRAXON, INC.', '009054': 'INNOVATIVE SEMICONDUCTORS, INC', '009061': 'PACIFIC RESEARCH & ENGINEERING CORPORATION', '00900B': 'LANNER ELECTRONICS, INC.', '0090CE': ' avateramedical Mechatronics GmbH', '009007': 'DOMEX TECHNOLOGY CORP.', '00902D': 'DATA ELECTRONICS (AUST.) PTY, LTD.', '0090D4': 'BindView Development Corp.', '009029': 'CRYPTO AG', '0090DF': 'MITSUBISHI CHEMICAL AMERICA, INC.', '0090C0': 'K.J. LAW ENGINEERS, INC.', '00901F': 'ADTEC PRODUCTIONS, INC.', '009024': 'PIPELINKS, INC.', '00903A': 'NIHON MEDIA TOOL INC.', '0090B2': 'AVICI SYSTEMS INC.', '0090B6': 'FIBEX SYSTEMS', '009063': 'COHERENT COMMUNICATIONS SYSTEMS CORPORATION', '009062': 'ICP VORTEX COMPUTERSYSTEME GmbH', '0010D3': 'GRIPS ELECTRONIC GMBH', '0010FB': 'ZIDA TECHNOLOGIES LIMITED', '001053': 'COMPUTER TECHNOLOGY CORP.', '0010ED': 'SUNDANCE TECHNOLOGY, INC.', '00106C': 'EDNT GmbH', '0010E9': 'RAIDTEC LTD.', '001003': 'IMATRON, INC.', '001071': 'ADVANET INC.', '009015': 'CENTIGRAM COMMUNICATIONS CORP.', '009095': 'UNIVERSAL AVIONICS', '009041': 'APPLIED DIGITAL ACCESS', '00905A': 'DEARBORN GROUP, INC.', '009011': 'WAVTrace, Inc.', '009065': 'FINISAR CORPORATION', '009023': 'ZILOG INC.', '0090F6': 'ESCALATE NETWORKS, INC.', '0090A8': 'NineTiles Networks, Ltd.', '00102A': 'ZF MICROSYSTEMS, INC.', '0010E5': 'SOLECTRON TEXAS', '00109D': 'CLARINET SYSTEMS, INC.', '00100E': 'MICRO LINEAR COPORATION', '0090EC': 'PYRESCOM', '0090C3': 'TOPIC SEMICONDUCTOR CORP.', '0010C8': 'COMMUNICATIONS ELECTRONICS SECURITY GROUP', '0010F3': 'Nexcom International Co., Ltd.', '001086': 'ATTO Technology, Inc.', '0010DF': 'RISE COMPUTER INC.', '001072': 'GVN TECHNOLOGIES, INC.', '0010DA': 'Kollmorgen Corp', '0010E4': 'NSI CORPORATION', '00107E': 'BACHMANN ELECTRONIC GmbH', '0010A0': 'INNOVEX TECHNOLOGIES, INC.', '001016': 'T.SQWARE', '001090': 'CIMETRICS, INC.', '0010F5': 'AMHERST SYSTEMS, INC.', '00103D': 'PHASECOM, LTD.', '001096': 'TRACEWELL SYSTEMS, INC.', '001082': 'JNA TELECOMMUNICATIONS LIMITED', '001098': 'STARNET TECHNOLOGIES, INC.', '001042': 'Alacritech, Inc.', '001068': 'COMOS TELECOM', '0010EA': 'ADEPT TECHNOLOGY', '0010AE': 'SHINKO ELECTRIC INDUSTRIES CO.', '0010C4': 'MEDIA GLOBAL LINKS CO., LTD.', '0010FE': 'DIGITAL EQUIPMENT CORPORATION', '001056': 'SODICK CO., LTD.', '0010CD': 'INTERFACE CONCEPT', '001061': 'HOSTLINK CORP.', '001099': 'InnoMedia, Inc.', '0010E1': 'S.I. 
TECH, INC.', '0010BB': 'DATA & INFORMATION TECHNOLOGY', '001020': 'Hand Held Products Inc', '00103A': 'DIAMOND NETWORK TECH', '001004': 'THE BRANTLEY COILE COMPANY,INC', '0010EF': 'DBTEL INCORPORATED', '001088': 'AMERICAN NETWORKS INC.', '001022': 'SatCom Media Corporation', '001076': 'EUREM GmbH', '00103F': 'TOLLGRADE COMMUNICATIONS, INC.', '001049': 'ShoreTel, Inc', '00105E': 'Spirent plc, Service Assurance Broadband', '0010AF': 'TAC SYSTEMS, INC.', '00108C': 'Fujitsu Services Ltd', '0010F7': 'IRIICHI TECHNOLOGIES Inc.', '0010AB': 'KOITO ELECTRIC INDUSTRIES, LTD.', '001010': 'INITIO CORPORATION', '0010F2': 'ANTEC', '00E007': 'Avaya ECS Ltd', '0010BE': 'MARCH NETWORKS CORPORATION', '001058': 'ArrowPoint Communications', '00100F': 'INDUSTRIAL CPU SYSTEMS', '0010BC': 'Aastra Telecom', '00E0BF': 'TORRENT NETWORKING TECHNOLOGIES CORP.', '00E0E3': 'SK-ELEKTRONIK GMBH', '00E0C6': 'LINK2IT, L.L.C.', '00E0E5': 'CINCO NETWORKS, INC.', '00E061': 'EdgePoint Networks, Inc.', '00E053': 'CELLPORT LABS, INC.', '00E0D3': 'DATENTECHNIK GmbH', '00E043': 'VitalCom', '00E0B3': 'EtherWAN Systems, Inc.', '00E0ED': 'SILICOM, LTD.', '00E0B8': 'GATEWAY 2000', '00E07C': 'METTLER-TOLEDO, INC.', '00E026': 'Redlake MASD LLC', '00E020': 'TECNOMEN OY', '00E00D': 'RADIANT SYSTEMS', '00E0DC': 'NEXWARE CORP.', '00E037': 'CENTURY CORPORATION', '00E0C2': 'NECSY S.p.A.', '00E0FB': 'LEIGHTRONIX, INC.', '00E09B': 'ENGAGE NETWORKS, INC.', '00E045': 'TOUCHWAVE, INC.', '00E040': 'DeskStation Technology, Inc.', '00E01A': 'COMTEC SYSTEMS. CO., LTD.', '00E078': 'BERKELEY NETWORKS', '00E087': 'LeCroy - Networking Productions Division', '00E041': 'CSPI', '00E0E2': 'INNOVA CORP.', '00E081': 'TYAN COMPUTER CORP.', '00E057': 'HAN MICROTELECOM. CO., LTD.', '00E0BC': 'SYMON COMMUNICATIONS, INC.', '00E082': 'ANERMA', '00E077': 'WEBGEAR, INC.', '00E056': 'HOLONTECH CORPORATION', '00E031': 'HAGIWARA ELECTRIC CO., LTD.', '00E00B': 'ROOFTOP COMMUNICATIONS CORP.', '00E0B2': 'TELMAX COMMUNICATIONS CORP.', '00E02F': 'MCNS HOLDINGS, L.P.', '00E07E': 'WALT DISNEY IMAGINEERING', '00E099': 'SAMSON AG', '0060AE': 'TRIO INFORMATION SYSTEMS AB', '006053': 'TOYODA MACHINE WORKS, LTD.', '006056': 'NETWORK TOOLS, INC.', '00600C': 'Eurotech Inc.', '00601C': 'TELXON CORPORATION', '00605F': 'NIPPON UNISOFT CORPORATION', '006091': 'FIRST PACIFIC NETWORKS, INC.', '00601D': 'LUCENT TECHNOLOGIES', '00607B': 'FORE SYSTEMS, INC.', '00E0EC': 'CELESTICA INC.', '00E06C': 'Ultra Electronics Command & Control Systems', '00E04A': 'ZX Technologies, Inc', '0060C9': 'ControlNet, Inc.', '00E07A': 'MIKRODIDAKT AB', '006032': 'I-CUBE, INC.', '006033': 'ACUITY IMAGING, INC.', '006013': 'NETSTAL MASCHINEN AG', '006022': 'VICOM SYSTEMS, INC.', '0060EE': 'APOLLO', '0060D8': 'ELMIC SYSTEMS, INC.', '0060EF': 'FLYTECH TECHNOLOGY CO., LTD.', '006085': 'Storage Concepts', '006011': 'CRYSTAL SEMICONDUCTOR CORP.', '0060F5': 'ICON WEST, INC.', '006062': 'TELESYNC, INC.', '0060E9': 'ATOP TECHNOLOGIES, INC.', '006043': 'iDirect, INC.', '006028': 'MACROVISION CORPORATION', '0060F0': 'JOHNSON & JOHNSON MEDICAL, INC', '0060E0': 'AXIOM TECHNOLOGY CO., LTD.', '006096': 'T.S. 
MICROTECH INC.', '00603A': 'QUICK CONTROLS LTD.', '000288': 'GLOBAL VILLAGE COMMUNICATION', '006034': 'ROBERT BOSCH GmbH', '006050': 'INTERNIX INC.', '0060FA': 'EDUCATIONAL TECHNOLOGY RESOURCES, INC.', '0060DA': 'Red Lion Controls, LP', '0060E4': 'COMPUSERVE, INC.', '00608F': 'TEKRAM TECHNOLOGY CO., LTD.', '0060C4': 'SOLITON SYSTEMS K.K.', '00A03C': 'EG&G NUCLEAR INSTRUMENTS', '00A0C4': 'CRISTIE ELECTRONICS LTD.', '00A063': 'JRL SYSTEMS, INC.', '00A02C': 'interWAVE Communications', '00A0F7': 'V.I COMPUTER CORP.', '00A090': 'TimeStep Corporation', '00A0EA': 'ETHERCOM CORP.', '00A0DC': 'O.N. ELECTRONIC CO., LTD.', '00A00B': 'COMPUTEX CO., LTD.', '00A0E2': 'Keisokugiken Corporation', '00A033': 'imc MeBsysteme GmbH', '00A0A9': 'NAVTEL COMMUNICATIONS INC.', '00A071': 'VIDEO LOTTERY TECHNOLOGIES,INC', '006000': 'XYCOM INC.', '006045': 'PATHLIGHT TECHNOLOGIES', '00A05D': 'CS COMPUTER SYSTEME GmbH', '00A061': 'PURITAN BENNETT', '0060A6': 'PARTICLE MEASURING SYSTEMS', '00602A': 'SYMICRON COMPUTER COMMUNICATIONS, LTD.', '00A06D': 'MANNESMANN TALLY CORPORATION', '00A0F6': 'AutoGas Systems Inc.', '0060BE': 'WEBTRONICS', '0060BF': 'MACRAIGOR SYSTEMS, INC.', '006080': 'MICROTRONIX DATACOM LTD.', '00A037': 'Mindray DS USA, Inc.', '00A04C': 'INNOVATIVE SYSTEMS & TECHNOLOGIES, INC.', '00A031': 'HAZELTINE CORPORATION, MS 1-17', '00A041': 'INFICON', '00A0A7': 'VORAX CORPORATION', '00A07E': 'AVID TECHNOLOGY, INC.', '00A06F': 'Color Sentinel Systems, LLC', '00A0C7': 'TADIRAN TELECOMMUNICATIONS', '00A01A': 'BINAR ELEKTRONIK AB', '00A088': 'ESSENTIAL COMMUNICATIONS', '00A0C2': 'R.A. SYSTEMS CO., LTD.', '00A098': 'NetApp', '00A04B': 'TFL LAN INC.', '00A064': 'KVB/ANALECT', '00A03E': 'ATM FORUM', '00A01F': 'TRICORD SYSTEMS, INC.', '00A0FB': 'TORAY ENGINEERING CO., LTD.', '00A06C': 'SHINDENGEN ELECTRIC MFG. CO., LTD.', '00A0DB': 'FISHER & PAYKEL PRODUCTION', '00A081': 'ALCATEL DATA NETWORKS', '00A0B1': 'FIRST VIRTUAL CORPORATION', '002010': 'JEOL SYSTEM TECHNOLOGY CO. LTD', '00209F': 'MERCURY COMPUTER SYSTEMS, INC.', '00A073': 'COM21, INC.', '00A03A': 'KUBOTEK CORPORATION', '00A0B2': 'SHIMA SEIKI', '00A08B': 'ASTON ELECTRONIC DESIGNS LTD.', '00A097': 'JC INFORMATION SYSTEMS', '00A027': 'FIREPOWER SYSTEMS, INC.', '00A046': 'SCITEX CORP. LTD.', '00A0D4': 'RADIOLAN, INC.', '00A092': 'H. 
BOLLMANN MANUFACTURERS, LTD', '00200D': 'CARL ZEISS', '00202D': 'TAIYO CORPORATION', '002091': 'J125, NATIONAL SECURITY AGENCY', '0020BD': 'NIOBRARA R & D CORPORATION', '002054': 'Sycamore Networks', '0020A7': 'PAIRGAIN TECHNOLOGIES, INC.', '002055': 'ALTECH CO., LTD.', '00200A': 'SOURCE-COMM CORP.', '0020CF': 'TEST & MEASUREMENT SYSTEMS INC', '0020B4': 'TERMA ELEKTRONIK AS', '0020E4': 'HSING TECH ENTERPRISE CO., LTD', '00206C': 'EVERGREEN TECHNOLOGY CORP.', '00205E': 'CASTLE ROCK, INC.', '002012': 'CAMTRONICS MEDICAL SYSTEMS', '002075': 'MOTOROLA COMMUNICATION ISRAEL', '0020A5': 'API ENGINEERING', '002064': 'PROTEC MICROSYSTEMS, INC.', '002033': 'SYNAPSE TECHNOLOGIES, INC.', '0020CB': 'PRETEC ELECTRONICS CORP.', '0020EB': 'CINCINNATI MICROWAVE, INC.', '0020A0': 'OA LABORATORY CO., LTD.', '0020E2': 'INFORMATION RESOURCE ENGINEERING', '002007': 'SFA, INC.', '00205C': 'InterNet Systems of Florida, Inc.', '0020A2': 'GALCOM NETWORKING LTD.', '002031': 'Tattile SRL ', '0020D0': 'VERSALYNX CORPORATION', '0020B9': 'METRICOM, INC.', '002039': 'SCINETS', '002072': 'WORKLINK INNOVATIONS', '0020EC': 'TECHWARE SYSTEMS CORP.', '00206E': 'XACT, INC.', '0020F1': 'ALTOS INDIA LIMITED', '002041': 'DATA NET', '002076': 'REUDO CORPORATION', '0020E8': 'DATATREK CORPORATION', '0020C5': 'EAGLE TECHNOLOGY', '002009': 'PACKARD BELL ELEC., INC.', '002027': 'MING FORTUNE INDUSTRY CO., LTD', '00208A': 'SONIX COMMUNICATIONS, LTD.', '0020D2': 'RAD DATA COMMUNICATIONS, LTD.', '002002': 'SERITECH ENTERPRISE CO., LTD.', '00204B': 'AUTOCOMPUTER CO., LTD.', '0020EA': 'EFFICIENT NETWORKS, INC.', '00206A': 'OSAKA COMPUTER CORP.', '0020DB': 'XNET TECHNOLOGY, INC.', '0020BB': 'ZAX CORPORATION', '0020A8': 'SAST TECHNOLOGY CORP.', '002045': 'ION Networks, Inc.', '002049': 'COMTRON, INC.', '002050': 'KOREA COMPUTER INC.', '002084': 'OCE PRINTING SYSTEMS, GMBH', '00208C': 'GALAXY NETWORKS, INC.', '0020A6': 'Proxim Wireless', '00202C': 'WELLTRONIX CO., LTD.', '002021': 'ALGORITHMS SOFTWARE PVT. 
LTD.', '00C0F9': 'Artesyn Embedded Technologies', '00C075': 'XANTE CORPORATION', '001C7C': 'PERQ SYSTEMS CORPORATION', '00C039': 'Teridian Semiconductor Corporation', '00C0A9': 'BARRON MCCANN LTD.', '00C04B': 'CREATIVE MICROSYSTEMS', '00C0B9': 'FUNK SOFTWARE, INC.', '00C015': 'NEW MEDIA CORPORATION', '00C083': 'TRACE MOUNTAIN PRODUCTS, INC.', '00C094': 'VMX INC.', '00C019': 'LEAP TECHNOLOGY, INC.', '00C0CF': 'IMATRAN VOIMA OY', '00C07D': 'RISC DEVELOPMENTS LTD.', '00C043': 'STRATACOM', '00C0B5': 'CORPORATE NETWORK SYSTEMS,INC.', '00C0ED': 'US ARMY ELECTRONIC', '00C032': 'I-CUBED LIMITED', '00C0A5': 'DICKENS DATA SYSTEMS', '00C0EF': 'ABIT CORPORATION', '00C061': 'SOLECTEK CORPORATION', '00C0AD': 'MARBEN COMMUNICATION SYSTEMS', '00C07F': 'NUPON COMPUTING CORP.', '00C057': 'MYCO ELECTRONICS', '00C056': 'SOMELEC', '00C027': 'CIPHER SYSTEMS, INC.', '00C05C': 'ELONEX PLC', '00C028': 'JASCO CORPORATION', '00C08D': 'TRONIX PRODUCT DEVELOPMENT', '00C02A': 'OHKURA ELECTRIC CO., LTD.', '00C0FC': 'ELASTIC REALITY, INC.', '00C0BB': 'FORVAL CREATIVE, INC.', '00C0E0': 'DSC COMMUNICATION CORP.', '00C05B': 'NETWORKS NORTHWEST, INC.', '00C008': 'SECO SRL', '00C0B7': 'AMERICAN POWER CONVERSION CORP', '00C0D3': 'OLYMPUS IMAGE SYSTEMS, INC.', '00C0E8': 'PLEXCOM, INC.', '00C0DA': 'NICE SYSTEMS LTD.', '00C0D1': 'COMTREE TECHNOLOGY CORPORATION', '00C038': 'RASTER IMAGE PROCESSING SYSTEM', '00409B': 'HAL COMPUTER SYSTEMS INC.', '0040EB': 'MARTIN MARIETTA CORPORATION', '0040BD': 'STARLIGHT NETWORKS, INC.', '0040ED': "NETWORK CONTROLS INT'NATL INC.", '004021': 'RASTER GRAPHICS', '0040C1': 'BIZERBA-WERKE WILHEIM KRAUT', '0040E1': 'MARNER INTERNATIONAL, INC.', '0040FE': 'SYMPLEX COMMUNICATIONS', '0040E5': 'SYBUS CORPORATION', '0040A5': 'CLINICOMP INTL.', '004005': 'ANI COMMUNICATIONS INC.', '0040D9': 'AMERICAN MEGATRENDS INC.', '00404C': 'HYPERTEC PTY LTD.', '00C030': 'INTEGRATED ENGINEERING B. V.', '00C0A6': 'EXICOM AUSTRALIA PTY. 
LTD', '00C0CB': 'CONTROL TECHNOLOGY CORPORATION', '00C0EB': 'SEH COMPUTERTECHNIK GMBH', '0040DB': 'ADVANCED TECHNICAL SOLUTIONS', '00C092': 'MENNEN MEDICAL INC.', '00C052': 'BURR-BROWN', '00400E': 'MEMOTEC, INC.', '00C03D': 'WIESEMANN & THEIS GMBH', '0040C8': 'MILAN TECHNOLOGY CORPORATION', '0040BA': 'ALLIANT COMPUTER SYSTEMS CORP.', '004038': 'TALENT ELECTRIC INCORPORATED', '0040D8': 'OCEAN OFFICE AUTOMATION LTD.', '004088': 'MOBIUS TECHNOLOGIES, INC.', '004032': 'DIGITAL COMMUNICATIONS', '0040C2': 'APPLIED COMPUTING DEVICES', '0040D4': 'GAGE TALKER CORP.', '0040CE': 'NET-SOURCE, INC.', '004062': 'E-SYSTEMS, INC./GARLAND DIV.', '004034': 'BUSTEK CORPORATION', '00401C': 'AST RESEARCH, INC.', '00400F': 'DATACOM TECHNOLOGIES', '004006': 'SAMPO TECHNOLOGY CORPORATION', '0080AA': 'MAXPEED', '00C050': 'TOYO DENKI SEIZO K.K.', '0040C6': 'FIBERNET RESEARCH, INC.', '004047': 'WIND RIVER SYSTEMS', '004050': 'IRONICS, INCORPORATED', '008092': 'Silex Technology, Inc.', '008093': 'XYRON CORPORATION', '00805A': "TULIP COMPUTERS INTERNAT'L B.V", '004041': 'FUJIKURA LTD.', '00804E': 'APEX COMPUTER COMPANY', '008055': 'FERMILAB', '00802A': 'TEST SYSTEMS & SIMULATIONS INC', '008035': 'TECHNOLOGY WORKS, INC.', '00807E': 'SOUTHERN PACIFIC LTD.', '0080EF': 'RATIONAL', '0080F0': 'Panasonic Communications Co., Ltd.', '00801D': 'INTEGRATED INFERENCE MACHINES', '008075': 'PARSYTEC GMBH', '008051': 'FIBERMUX', '0080C6': 'NATIONAL DATACOMM CORPORATION', '0080C0': 'PENRIL DATACOMM', '00802E': 'CASTLE ROCK COMPUTING', '0080F2': 'RAYCOM SYSTEMS INC', '0080BD': 'THE FURUKAWA ELECTRIC CO., LTD', '008025': 'Telit Wireless Solutions GmbH', '0080EA': 'ADVA Optical Networking Ltd.', '00001E': 'TELSIST INDUSTRIA ELECTRONICA', '000050': 'RADISYS CORPORATION', '008004': 'ANTLOW COMMUNICATIONS, LTD.', '0080D0': 'COMPUTER PERIPHERALS, INC.', '008024': 'KALPANA, INC.', '008040': 'JOHN FLUKE MANUFACTURING CO.', '008021': 'Alcatel Canada Inc.', '0080E8': 'CUMULUS CORPORATIION', '008069': 'COMPUTONE SYSTEMS', '00800D': 'VOSSWINKEL F.U.', '0080D1': 'KIMTRON CORPORATION', '008042': 'Artesyn Embedded Technologies', '00809A': 'NOVUS NETWORKS LTD', '008000': 'MULTITECH SYSTEMS, INC.', '0080ED': 'IQ TECHNOLOGIES, INC.', '00804A': 'PRO-LOG', '000066': 'TALARIS SYSTEMS, INC.', '000049': 'APRICOT COMPUTERS, LTD', '0000FA': 'MICROSAGE COMPUTER SYSTEMS INC', '0000D4': 'PURE DATA LTD.', '000019': 'APPLIED DYNAMICS INTERNATIONAL', '000015': 'DATAPOINT CORPORATION', '00001C': 'BELL TECHNOLOGIES', '000034': 'NETWORK RESOURCES CORPORATION', '000022': 'VISUAL TECHNOLOGY INC.', '0000B5': 'DATABILITY SOFTWARE SYS. 
INC.', '00002F': 'TIMEPLEX INC.', '0000B8': 'SEIKOSHA CO., LTD.', '0000E6': 'APTOR PRODUITS DE COMM INDUST', '000084': 'SUPERNET', '00009A': 'RC COMPUTER A/S', '000027': 'JAPAN RADIO COMPANY', '0000E8': 'ACCTON TECHNOLOGY CORP.', '00004B': 'ICL DATA OY', '0000E0': 'QUADRAM CORP.', '0000AB': 'LOGIC MODELING CORPORATION', '0080AC': 'IMLOGIX, DIVISION OF GENESYS', '00004F': 'LOGICRAFT, INC.', '00006F': 'Madge Ltd.', '000078': 'LABTAM LIMITED', '00005A': 'SysKonnect GmbH', '00005B': 'ELTEC ELEKTRONIK AG', '000071': 'ADRA SYSTEMS INC.', '000073': 'SIECOR CORPORATION', '0000B9': 'MCDONNELL DOUGLAS COMPUTER SYS', '0000BF': 'SYMMETRIC COMPUTER SYSTEMS', '00002D': 'CHROMATICS INC', '000018': 'WEBSTER COMPUTER CORPORATION', '0000C8': 'ALTOS COMPUTER SYSTEMS', '0000D5': 'MICROGNOSIS INTERNATIONAL', '00003A': 'CHYRON CORPORATION', '000059': 'Hellige GMBH', '000069': 'CONCORD COMMUNICATIONS INC', '0000E7': 'Star Gate Technologies', '00004D': 'DCI CORPORATION', '000023': 'ABB INDUSTRIAL SYSTEMS AB', '0000BE': 'THE NTI GROUP', '0000D9': 'NIPPON TELEGRAPH & TELEPHONE', '000080': 'CRAY COMMUNICATIONS A/S', '08002A': 'MOSAIC TECHNOLOGIES INC.', '080089': 'Kinetics', '080086': 'KONICA MINOLTA HOLDINGS, INC.', '080083': 'Seiko Instruments Inc.', '080061': 'JAROGATE LTD.', '08005F': 'SABER TECHNOLOGY CORP.', '080058': 'SYSTEMS CONCEPTS', '080049': 'UNIVATION', '080024': '10NET COMMUNICATIONS/DCA', '080022': 'NBI INC.', '080020': 'Oracle Corporation', '08001F': 'SHARP CORPORATION', '080014': 'EXCELAN', 'AA0000': 'DIGITAL EQUIPMENT CORPORATION', 'AA0001': 'DIGITAL EQUIPMENT CORPORATION', 'AA0002': 'DIGITAL EQUIPMENT CORPORATION', '000007': 'XEROX CORPORATION', '00801F': 'KRUPP ATLAS ELECTRONIK GMBH', '080006': 'SIEMENS AG', '04E0C4': 'TRIUMPH-ADLER AG', '020701': 'RACAL-DATACOM', '080013': 'Exxon', '00DD08': 'UNGERMANN-BASS INC.', '000005': 'XEROX CORPORATION', '021C7C': 'PERQ SYSTEMS CORPORATION', '080065': 'GENRAD INC.', '84A9EA': 'Career Technologies USA', 'E405F8': 'Delta Innovation Technology Co., Ltd.', '000009': 'XEROX CORPORATION', '0080E9': 'Madge Ltd.', '0040D6': 'LOCAMATION B.V.', '08004B': 'Planning Research Corp.', '02AA3C': 'OLIVETTI TELECOMM SPA (OLTECO)', '080059': 'A/S MYCRON', '080008': 'BOLT BERANEK AND NEWMAN INC.', 'F47488': 'New H3C Technologies Co., Ltd', 'FCC233': 'ASUSTek COMPUTER INC.', '401175': 'IEEE Registration Authority', '8031F0': 'Samsung Electronics Co.,Ltd', '287FCF': 'Intel Corporate', '583526': 'DEEPLET TECHNOLOGY CORP', '34B5A3': 'CIG SHANGHAI CO LTD', '6C1DEB': 'u-blox AG', '2852F9': 'Zhongxin Intelligent Times (Shenzhen) Co., Ltd.', 'B8F853': 'Arcadyan Corporation', 'E0D083': 'Samsung Electronics Co.,Ltd', '743C18': 'Taicang T&W Electronics', '4C80BA': 'Wuhan Tianyu Information Industry Co., Ltd.', '8C02FA': 'COMMANDO Networks Limited', 'F0264C': 'Sigrist-Photometer AG', 'D03D52': 'Vaion Limited', 'D80B9A': 'Samsung Electronics Co.,Ltd', 'AC8D34': 'HUAWEI TECHNOLOGIES CO.,LTD', '645299': 'The Chamberlain Group, Inc', 'F875A4': 'LCFC(HeFei) Electronics Technology co., ltd', '00D2B1': 'TPV Display Technology (Xiamen) Co.,Ltd.', 'C0E434': 'AzureWave Technology Inc.', '6C710D': 'Cisco Systems, Inc', '246F8C': 'Huawei Device Co., Ltd.', '1C1386': 'Huawei Device Co., Ltd.', 'BC2EF6': 'Huawei Device Co., Ltd.', '4455C4': 'Huawei Device Co., Ltd.', '000829': 'TOKYO ELECTRON DEVICE NAGASAKI LIMITED', '1C4455': 'Sieb & Meyer AG', '803253': 'Intel Corporate', 'F88A5E': 'Texas Instruments', '5CE7A0': 'Nokia', 'E01F88': 'Xiaomi Communications Co Ltd', '8CDC02': 'zte corporation', 
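# --- OUI vendor table (continued) ---
# Each key is an IEEE OUI prefix: the first three bytes of a MAC address as
# six uppercase hex digits with no separators. Each value is the registered
# vendor string, kept verbatim from the IEEE registry (including its stray
# spaces, wrapped names, and non-breaking '\xa0' characters).
# A minimal lookup sketch, assuming this literal is bound to a name such as
# OUI_TABLE (hypothetical; the actual binding appears earlier in this file):
#     prefix = mac.replace(':', '').replace('-', '').upper()[:6]
#     vendor = OUI_TABLE.get(prefix, 'unknown vendor')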
'B4BC7C': 'Texas Instruments', 'E0AAB0': 'SUNTAILI ENTERPRISE CO. LTD,', '683943': 'ittim', '10C65E': 'Adapt-IP', '7CA7B0': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '20311C': 'vivo Mobile Communication Co., Ltd.', '104F58': 'Aruba, a Hewlett Packard Enterprise Company', 'B4E842': 'Hong Kong Bouffalo Lab Limited', '0003CB': 'SystemGear Co., Ltd.', '506255': 'IEEE Registration Authority', 'DC4BFE': 'Shenzhen Belon Technology CO.,LTD', '80AC7C': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', 'F8D027': 'Seiko Epson Corporation', '5C666C': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '4C4BF9': 'IEEE Registration Authority', '1CEA0B': 'Edgecore Networks Corporation', '24418C': 'Intel Corporate', '44EFBF': 'China Dragon Technology Limited', 'B81F5E': 'Apption Labs Limited', 'D81265': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '4CB44A': 'NANOWAVE Technologies Inc.', '048C9A': 'Huawei Device Co., Ltd.', '60F262': 'Intel Corporate', 'EC3CBB': 'Huawei Device Co., Ltd.', '8C3A7E': 'Universal Electronics, Inc.', '70441C': 'SHENZHEN KAIFA TECHNOLOGY CO.,LTD.', 'B47C59': 'Jiangsu Hengxin Technology Co.,Ltd.', '300D9E': 'Ruijie Networks Co.,LTD', 'F8B46A': 'Hewlett Packard', 'ECFA5C': 'Beijing Xiaomi Electronics Co., Ltd.', 'BCB0E7': 'HUAWEI TECHNOLOGIES CO.,LTD', '5434EF': 'HUAWEI TECHNOLOGIES CO.,LTD', '88D5A8': 'ITEL MOBILE LIMITED', '208593': 'IEEE Registration Authority', 'ACE342': 'HUAWEI TECHNOLOGIES CO.,LTD', '9017C8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E4922A': 'DBG HOLDINGS LIMITED', '2C641F': 'Vizio, Inc', '207759': 'OPTICAL NETWORK VIDEO TECHNOLOGIES (SHENZHEN) CO., LTD.', '54E7D5': 'Sun Cupid Technology (HK) LTD', '189088': 'eero inc.', '4C56DF': 'Targus US LLC', '241510': 'IEEE Registration Authority', '6C4D51': 'Shenzhen Ceres Technology Co., Ltd.', '889D98': 'Allied-telesisK.K.', 'DCF8B9': 'zte corporation', '18BF1C': 'Jiangsu Huitong Group Co.,Ltd.', 'ACDE48': 'Private', '0050C7': 'Private', '002067': 'Private', 'B4EE25': 'Shenzhen Belon Technology CO.,LTD', 'C82B96': 'Espressif Inc.', '98523D': 'Sunitec Enterprise Co.,Ltd', 'D015A6': 'Aruba, a Hewlett Packard Enterprise Company', '000163': 'Cisco Systems, Inc', '04819B': 'BSkyB Ltd', '4801C5': 'OnePlus Technology (Shenzhen) Co., Ltd', '60634C': 'D-Link International', '205F3D': 'Cambridge Communication Systems Ltd', 'E00084': 'HUAWEI TECHNOLOGIES CO.,LTD', '2CA89C': 'Creatz inc.', '4CDC0D': 'Coral Telecom Limited', '004E01': 'Dell Inc.', 'C4E1A1': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'ACC358': 'Continental Automotive Czech Republic s.r.o.', '3CECEF': 'Super Micro Computer, Inc.', '1855E3': 'Apple, Inc.', 'E450EB': 'Apple, Inc.', '886440': 'Apple, Inc.', '6070C0': 'Apple, Inc.', 'F0C371': 'Apple, Inc.', '64FF0A': 'Wistron Neweb Corporation', 'F09919': 'Garmin International', 'F43E66': 'Bee Computing (HK) Limited', 'DC396F': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', 'B4C476': 'Wuhan Maritime Communication Research Institute', '683489': 'LEA Professional', 'D04E50': 'Mobiwire Mobiles (NingBo) Co., LTD', 'B46C47': 'Panasonic Appliances Company', '44422F': 'TESTOP CO.,LTD.', '549C27': 'Plasma Cloud Limited', '94BF80': 'zte corporation', '987A14': 'Microsoft Corporation', 'C83DDC': 'Xiaomi Communications Co Ltd', '9C3A9A': 'Shenzhen Sundray Technologies Company Limited', '541589': 'MCS Logic Inc.', '845733': 'Microsoft Corporation', '0000DE': 'CETIA', 'B0B5E8': 'Ruroc LTD', '04D590': 'Fortinet, Inc.', '00AD63': 'Dedicated Micros Malta LTD', '58F39C': 'Cisco Systems, Inc', '002423': 'AzureWave Technologies 
(Shanghai) Inc.', '6029D5': 'DAVOLINK Inc.', '509744': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'C4411E': 'Belkin International Inc.', '0077E4': 'Nokia', '8C593C': 'IEEE Registration Authority', 'E415F6': 'Texas Instruments', 'F41D6B': 'HUAWEI TECHNOLOGIES CO.,LTD', '7CEC9B': 'Fuzhou Teraway Information Technology Co.,Ltd', 'CC9070': 'Cisco Systems, Inc', '2841C6': 'HUAWEI TECHNOLOGIES CO.,LTD', 'AC4228': 'Parta Networks', '380118': 'ULVAC,Inc.', '14ADCA': 'China Mobile Iot Limited company', '809133': 'AzureWave Technology Inc.', 'B4F58E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C48FC1': 'DEEPTRACK S.L.U.', 'E828C1': 'Eltex Enterprise Ltd.', '78D347': 'Ericsson AB', 'F82387': 'Shenzhen Horn Audio Co.,Ltd.', 'A4A179': 'Nanjing dianyan electric power automation co. LTD', '68DB67': 'Nantong Coship Electronics Co., Ltd.', '1819D6': 'Samsung Electronics Co.,Ltd', 'BC98DF': 'Motorola Mobility LLC, a Lenovo Company', '70FC8F': 'FREEBOX SAS', '501B32': 'Taicang T&W Electronics', '980D67': 'Zyxel Communications Corporation', '0002D8': 'BRECIS Communications Corporation', 'ACF5E6': 'Cisco Systems, Inc', '782C29': 'New H3C Technologies Co., Ltd', 'DCB082': 'Nokia', 'F8C397': 'NZXT Corp. Ltd.', '70DDA8': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '4C6F9C': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D4D252': 'Intel Corporate', '58A023': 'Intel Corporate', '702E80': 'DIEHL Connectivity Solutions', 'BC9FE4': 'Aruba, a Hewlett Packard Enterprise Company', 'B0A6F5': 'Xaptum, Inc.', '109397': 'ARRIS Group, Inc.', '5075F1': 'ARRIS Group, Inc.', 'E4F3E8': 'Shenzhen SuperElectron Technology Co.,Ltd.', 'CCA12B': 'TCL King Electrical Appliances (Huizhou) Co., Ltd', 'ACA46E': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'C8B422': 'ASKEY COMPUTER CORP', '94EE9F': 'HMD Global Oy', '001EA3': 'Nokia Danmark A/S', '38F32E': 'Skullcandy', 'DC2AA1': 'MedHab LLC', 'F8B797': 'NEC Platforms, Ltd.', 'B0AAD2': 'Sichuan tianyi kanghe communications co., LTD', 'F4323D': 'Sichuan tianyi kanghe communications co., LTD', 'AC00D0': 'zte corporation', 'E8C417': 'Fiberhome Telecommunication Technologies Co.,LTD', '18BC5A': 'Zhejiang Tmall Technology Co., Ltd.', 'C4C138': 'OWLink Technology Inc', '981E19': 'Sagemcom Broadband SAS', '84B866': 'Beijing XiaoLu technology co. 
LTD', '205869': 'Ruckus Wireless', 'CC37AB': 'Edgecore Networks Corporation', '907841': 'Intel Corporate', 'C46516': 'Hewlett Packard', 'E41E0A': 'IEEE Registration Authority', 'AC37C9': 'RAID Incorporated', 'C86314': 'IEEE Registration Authority', '1422DB': 'eero inc.', '243154': 'HUAWEI TECHNOLOGIES CO.,LTD', '84B8B8': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '10A3B8': 'Iskratel d.o.o.', '70CD91': 'TERACOM TELEMATICA S.A', 'D041C9': 'Fiberhome Telecommunication Technologies Co.,LTD', 'E8018D': 'Fiberhome Telecommunication Technologies Co.,LTD', '18399C': 'Skorpios Technologies', '94C2BD': 'TECNOBIT', 'E8ECA3': 'Dongguan Liesheng Electronic Co.Ltd', '08A6BC': 'Amazon Technologies Inc.', '2C58E8': 'HUAWEI TECHNOLOGIES CO.,LTD', '4883B4': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '701E68': 'Hanna Instruments, Inc.', '1CB3E9': ' Shenzhen Zhongke United Communication Technology ', 'C0074A': 'Brita GmbH', 'E8B2FE': 'HUMAX Co., Ltd.', '2C1875': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', 'D06EDE': 'Sagemcom Broadband SAS', 'B0BB8B': 'WAVETEL TECHNOLOGY LIMITED', '0017FA': 'Microsoft Corporation', '94BFC4': 'Ruckus Wireless', '543B30': 'duagon AG', '40F21C': 'DASAN Zhone Solutions', '8C965F': 'Shandong Zhongan Technology Co., Ltd.', 'F4AFE7': 'Apple, Inc.', 'AC88FD': 'Apple, Inc.', '941625': 'Apple, Inc.', '34A8EB': 'Apple, Inc.', 'A483E7': 'Apple, Inc.', '243F30': 'Oxygen Broadband s.a.', '3C9180': 'Liteon Technology Corporation', '80FD7A': 'BLU Products Inc', 'B4A305': 'XIAMEN YAXON NETWORK CO., LTD.', '803E48': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '24586E': 'zte corporation', '20326C': 'Samsung Electronics Co.,Ltd', '6489F1': 'Samsung Electronics Co.,Ltd', '2034FB': 'Xiaomi Communications Co Ltd', 'A89CED': 'Xiaomi Communications Co Ltd', '6CA604': 'ARRIS Group, Inc.', '503E7C': 'LeiShen Intelligent System Co.Ltd', '5CF9DD': 'Dell Inc.', 'D0EC35': 'Cisco Systems, Inc', '10AE60': 'Private', '0025DF': 'Private', 'BCCF4F': 'Zyxel Communications Corporation', '0CE041': 'iDruide', 'B88FB4': 'JABIL CIRCUIT ITALIA S.R.L', 'C010B1': 'HMD Global Oy', '90895F': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '48D845': 'Shenzhen Mainuoke Electronics Co., Ltd', '0052C2': 'peiker acustic GmbH', '48BD0E': 'Quanta Storage Inc.', '00124E': 'XAC AUTOMATION CORP.', '88E034': 'Shinwa industries(China) ltd.', '380025': 'Intel Corporate', 'D45383': 'Murata Manufacturing Co., Ltd.', 'A04246': 'IT Telecom Co., Ltd.', '68FF7B': 'TP-LINK TECHNOLOGIES CO.,LTD.', '808F1D': 'TP-LINK TECHNOLOGIES CO.,LTD.', '0CF475': 'Zliide Technologies ApS', '000F69': 'SEW Eurodrive GmbH & Co. KG', '8C53D2': 'China Mobile Group Device Co.,Ltd.', 'E458E7': 'Samsung Electronics Co.,Ltd', '00104A': 'The Parvus Corporation', 'D058C0': 'Qingdao Haier Multimedia Limited. 
', 'F8D478': 'Flextronics Tech.(Ind) Pvt Ltd', '3821C7': 'Aruba, a Hewlett Packard Enterprise Company', '48C3B0': 'Pharos Co.Ltd', 'DC58BC': 'Thomas-Krenn.AG', '001025': 'Grayhill, Inc', '70EA1A': 'Cisco Systems, Inc', '9844B6': 'INFRANOR SAS', '38839A': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', 'C8F6C8': 'Fiberhome Telecommunication Technologies Co.,LTD', '808A8B': 'vivo Mobile Communication Co., Ltd.', 'DC6723': 'barox Kommunikation GmbH', '44B462': 'Flextronics Tech.(Ind) Pvt Ltd', '94B40F': 'Aruba, a Hewlett Packard Enterprise Company', '001A1E': 'Aruba, a Hewlett Packard Enterprise Company', '00246C': 'Aruba, a Hewlett Packard Enterprise Company', '201BC9': 'Juniper Networks', 'A45F9B': 'Nexell', 'ECC57F': 'Suzhou Pairlink Network Technology', 'A86DAA': 'Intel Corporate', '38C2BA': 'CCTV NEOTECH', 'A0F9B7': 'Ademco Smart Homes Technology(Tianjin)Co.,Ltd.', '1C3B8F': 'Selve GmbH & Co. KG', 'E4E749': 'Hewlett Packard', 'A83CCB': 'ROSSMA', '886FD4': 'Dell Inc.', 'CC3FEA': 'BAE Systems, Inc', 'E85BB7': 'Ample Systems Inc.', '94677E': 'Belden India Private Limited', '50AD92': 'NX Technologies', '4CF2BF': 'Cambridge Industries(Group) Co.,Ltd.', 'AC4330': 'Versa Networks', 'D43A2E': 'SHENZHEN MTC CO LTD', 'AC5775': 'HMD Global Oy', '30C3D9': 'ALPS ELECTRIC CO., LTD.', 'CC9EA2': 'Amazon Technologies Inc.', '003217': 'Cisco Systems, Inc', '001BFB': 'ALPS ELECTRIC CO., LTD.', '6CA936': 'DisplayLink (UK) Ltd', '708540': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '08F1EA': 'Hewlett Packard Enterprise', '7CDB98': 'ASKEY COMPUTER CORP', '78B213': 'DWnet Technologies(Suzhou) Corporation', 'F00DF5': 'ACOMA Medical Industry Co,. Ltd.', '58C232': 'NEC Corporation', '94E0D6': 'China Dragon Technology Limited', 'B4A9FC': 'Quanta Computer Inc.', '380B3C': 'Texas Instruments', '6845F1': 'TOSHIBA CLIENT SOLUTIONS CO., LTD.', '8CAEDB': 'NAG LLC', '381D14': 'Skydio Inc.', '3C286D': 'Google, Inc.', '00093A': 'Molex CMS', 'E82C6D': 'SmartRG, Inc.', '04F9D9': 'Speaker Electronic(Jiashan) Co.,Ltd', '9C8CD8': 'Hewlett Packard Enterprise', 'A48CC0': 'JLG Industries, Inc.', '74F737': 'KCE', '48A493': 'TAIYO YUDEN CO.,LTD', '88D211': 'Eko Devices, Inc.', 'DC080F': 'Apple, Inc.', 'F8E94E': 'Apple, Inc.', 'EC2CE2': 'Apple, Inc.', '40BC60': 'Apple, Inc.', 'E83617': 'Apple, Inc.', '9C648B': 'Apple, Inc.', 'B8C227': 'PSTec', '48E695': 'Insigma Inc', 'B479C8': 'Ruckus Wireless', 'B40B78': 'Brusa Elektronik AG', '207918': 'Intel Corporate', '0C9541': 'CHIPSEA TECHNOLOGIES (SHENZHEN) CORP.', 'DC48B2': 'Baraja Pty. 
Ltd.', 'ACAE19': 'Roku, Inc', '2CAA8E': 'Wyze Labs Inc', '703A51': 'Xiaomi Communications Co Ltd', '344262': 'Apple, Inc.', '14D00D': 'Apple, Inc.', 'C03DD9': 'MitraStar Technology Corp.', '181E95': 'AuVerte', 'ACF85C': 'Private', '9C6937': 'Qorvo Utrecht B.V.', '001060': 'BILLIONTON SYSTEMS, INC.', 'C4D489': 'JiangSu Joyque Information Industry Co.,Ltd', '1012B4': 'SICHUAN TIANYI COMHEART TELECOM CO.,LTD', 'C0BDC8': 'Samsung Electronics Co.,Ltd', '647BCE': 'Samsung Electronics Co.,Ltd', 'A887B3': 'Samsung Electronics Co.,Ltd', '6C006B': 'Samsung Electronics Co.,Ltd', '6CC7EC': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '04E598': 'Xiaomi Communications Co Ltd', 'E43C80': 'University of Oklahoma', 'B82CA0': 'Resideo', 'B033A6': 'Juniper Networks', 'D89685': 'GoPro', '3C3786': 'NETGEAR', '3C2C30': 'Dell Inc.', '48352E': 'Shenzhen Wolck Network Product Co.,LTD', 'B4F949': 'optilink networks pvt ltd', 'A4A1E4': 'Innotube, Inc.', '98D3E7': 'Netafim L', '94EAEA': 'TELLESCOM INDUSTRIA E COMERCIO EM TELECOMUNICACAO ', 'A0A4C5': 'Intel Corporate', 'F4D108': 'Intel Corporate', '60CE92': 'The Refined Industry Company Limited', '1CFD08': 'IEEE Registration Authority', '301389': 'Siemens AG, Automations & Drives,', '3C01EF': 'Sony Mobile Communications Inc', '706D15': 'Cisco Systems, Inc', 'B8599F': 'Mellanox Technologies, Inc.', '046B25': 'SICHUAN TIANYI COMHEART TELECOM CO.,LTD', '00AD24': 'D-Link International', '54068B': 'Ningbo Deli Kebei Technology Co.LTD', '549FAE': 'iBASE Gaming Inc', '7CC385': 'HUAWEI TECHNOLOGIES CO.,LTD', '900EB3': 'Shenzhen Amediatech Technology Co., Ltd.', '20B001': 'Technicolor', 'C0BFA7': 'Juniper Networks', '548028': 'Hewlett Packard Enterprise', 'F05494': 'Honeywell Connected Building', '48872D': 'SHEN ZHEN DA XIA LONG QUE TECHNOLOGY CO.,LTD', '105BAD': 'Mega Well Limited', '74BFC0': 'CANON INC.', 'F063F9': 'HUAWEI TECHNOLOGIES CO.,LTD', '1469A2': 'SICHUAN TIANYI COMHEART TELECOM CO.,LTD', 'C8D9D2': 'Hewlett Packard', '24FCE5': 'Samsung Electronics Co.,Ltd', '809621': 'Lenovo', 'B02A43': 'Google, Inc.', '181DEA': 'Intel Corporate', '185680': 'Intel Corporate', '78055F': 'Shenzhen WYC Technology Co., Ltd.', '00EABD': 'Cisco Systems, Inc', '5CC999': 'New H3C Technologies Co., Ltd', 'E81A58': 'TECHNOLOGIC SYSTEMS', 'C474F8': 'Hot Pepper, Inc.', 'BCB22B': 'EM-Tech', '704FB8': 'ARRIS Group, Inc.', '0060EB': 'FOURTHTRACK SYSTEMS', 'EC79F2': 'Startel', '04BC87': 'Shenzhen JustLink Technology Co., LTD', '54C33E': 'Ciena Corporation', '142233': 'Fiberhome Telecommunication Technologies Co.,LTD', '743400': 'MTG Co., Ltd.', 'DC3757': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '1862E4': 'Texas Instruments', 'D4AB82': 'ARRIS Group, Inc.', '6035C0': 'SFR', '005099': '3COM EUROPE LTD', 'B08BCF': 'Cisco Systems, Inc', 'C4985C': 'Hui Zhou Gaoshengda Technology Co.,LTD', '30A1FA': 'HUAWEI TECHNOLOGIES CO.,LTD', '242E90': 'PALIT MICROSYSTEMS, LTD', '64628A': 'evon GmbH', '0415D9': 'Viwone', 'ECB313': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '001AC5': 'Keysight Technologies, Inc.', '00201E': 'NETQUEST CORPORATION', '00608C': '3COM', '00A024': '3COM', '0020AF': '3COM', '00104B': '3COM', '9CAA1B': 'Microsoft Corporation', 'A89A93': 'Sagemcom Broadband SAS', '8C9246': 'Oerlikon Textile Gmbh&Co.KG', '645D86': 'Intel Corporate', 'A85AF3': 'Shanghai Siflower Communication Technology Co., Ltd', '70FD46': 'Samsung Electronics Co.,Ltd', 'E00EE1': 'We Corporation Inc.', '000E94': 'Maas International BV', '000C43': 'Ralink Technology, Corp.', '0C9D92': 'ASUSTek COMPUTER INC.', '0CCB85': 'Motorola Mobility LLC, a Lenovo Company', '001A31': 'SCAN COIN AB', '001B84': 'Scan Engineering Telecom', '3412F9': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BCE265': 'HUAWEI TECHNOLOGIES CO.,LTD', '4CD1A1': 'HUAWEI TECHNOLOGIES CO.,LTD', '88BFE4': 'HUAWEI TECHNOLOGIES CO.,LTD', '8C83E1': 'Samsung Electronics Co.,Ltd', '889F6F': 'Samsung Electronics Co.,Ltd', '5C63C9': 'Intellithings Ltd.', '0C96E6': 'Cloud Network Technology (Samoa) Limited', '4017E2': 'INTAI TECHNOLOGY CORP.', '4898CA': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', '247E51': 'zte corporation', 'E8B541': 'zte corporation', 'F4068D': 'devolo AG', '988ED4': 'ITEL MOBILE LIMITED', 'E8A788': 'XIAMEN LEELEN TECHNOLOGY CO., LTD', '3C8994': 'BSkyB Ltd', '582D34': 'Qingping Electronics (Suzhou) Co., Ltd', '20DE88': 'IC Realtime LLC', '741F79': 'YOUNGKOOK ELECTRONICS CO.,LTD', '482CA0': 'Xiaomi Communications Co Ltd', 'A4E615': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', 'B4CEFE': 'James Czekaj', '88AE1D': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'B888E3': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'F8369B': 'Texas Instruments', 'A09351': 'Cisco Systems, Inc', '208984': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'F8CC6E': 'DEPO Electronics Ltd', '782F17': 'Xlab Co.,Ltd', 'B0027E': 'MULLER SERVICES', '24FAF3': 'Shanghai Flexem Technology Co.,Ltd.', '88D2BF': 'German Autolabs', '20163D': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'C4518D': 'Shenzhen YOUHUA Technology Co., Ltd', '486834': 'Silicon Motion, Inc.', '001133': 'Siemens AG Austria', 'E0735F': 'NUCOM', '0051ED': 'LG Innotek', '98039B': 'Mellanox Technologies, Inc.', '40DC9D': 'HAJEN', '641331': 'Bosch Car Multimedia (Wuhu) Co. Ltd.', '183A48': 'VostroNet', '340A98': 'HUAWEI TECHNOLOGIES CO.,LTD', '646D6C': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C4B8B4': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F0BCC9': 'PFU LIMITED', '000B23': 'Siemens Home & Office Comm. Devices', '289EFC': 'Sagemcom Broadband SAS', '00C055': 'MODULAR COMPUTING TECHNOLOGIES', 'E41FE9': 'Dunkermotoren GmbH', '904C81': 'Hewlett Packard Enterprise', '8C3579': 'QDIQO Sp. z o.o.', '38C70A': 'WiFiSong', '5C2ED2': 'ABC(XiSheng) Electronics Co.,Ltd', '9C5A44': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. 
', '487583': 'Intellion AG', '007E95': 'Cisco Systems, Inc', 'FCFBFB': 'Cisco Systems, Inc', 'D8760A': 'Escort, Inc.', '14CAA0': 'Hu&Co', 'D82477': 'Universal Electric Corporation', '00907F': 'WatchGuard Technologies, Inc.', '4C5E0C': 'Routerboard.com', '001472': 'China Broadband Wireless IP Standard group(ChinaBWIPS)', 'F4EE14': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '6C5940': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', 'D02516': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '1C60DE': 'MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '4006A0': 'Texas Instruments', '683A1E': 'Cisco Meraki', '001017': 'Bosch Access Systems GmbH', '80B575': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A4BE2B': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D4CA6D': 'Routerboard.com', '08D46A': 'LG Electronics (Mobile Communications)', 'DCB4AC': 'FLEXTRONICS MANUFACTURING(ZHUHAI)CO.,LTD.', '64A2F9': 'OnePlus Technology (Shenzhen) Co., Ltd', 'A87D12': 'HUAWEI TECHNOLOGIES CO.,LTD', '203DBD': 'LG Innotek', 'A492CB': 'Nokia', 'C0D2F3': 'Hui Zhou Gaoshengda Technology Co.,LTD', '58B10F': 'Samsung Electronics Co.,Ltd', 'A45385': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '00402F': 'XLNT DESIGNS INC.', '2811A5': 'Bose Corporation', 'D8F3DB': 'Post CH AG', '04ECBB': 'Fiberhome Telecommunication Technologies Co.,LTD', 'D42122': 'Sercomm Corporation.', '00C002': 'Sercomm Corporation.', '68E7C2': 'Samsung Electronics Co.,Ltd', 'B88303': 'Hewlett Packard Enterprise', 'BC9911': 'Zyxel Communications Corporation', '280245': 'Konze System Technology Co.,Ltd.', 'E48F65': 'Yelatma Instrument Making Enterprise, JSC', '840D8E': 'Espressif Inc.', '3CF5CC': 'New H3C Technologies Co., Ltd', 'D08A91': 'Technicolor CH USA Inc.', '00BB3A': 'Amazon Technologies Inc.', 'C08135': 'Ningbo Forfan technology Co., LTD', 'F82DC0': 'ARRIS Group, Inc.', '189C27': 'ARRIS Group, Inc.', '0CB34F': 'Shenzhen Xiaoqi Intelligent Technology Co., Ltd.', '3CF4F9': 'Moda-InnoChips', '94193A': 'Elvaco AB', '5050CE': 'Hangzhou Dianyixia Communication Technology Co. Ltd.', '2C28B7': 'Hangzhou Ruiying technology co., LTD', '106530': 'Dell Inc.', '88D37B': 'FirmTek, LLC', 'B4C0F5': 'Shenzhen TINNO Mobile Technology Corp.', '406231': 'GIFA', 'FCB7F0': 'Idaho National Laboratory', '046B1B': 'SYSDINE Co., Ltd.', 'E8FAF7': 'Guangdong Uniteddata Holding Group Co., Ltd.', '949D57': 'Panasonic do Brasil Limitada', '002082': 'ONEAC CORPORATION', '0000A8': 'Stratus Technologies', '0004FC': 'Stratus Technologies', '3C24F0': 'IEEE Registration Authority', '58DB15': 'TECNO MOBILE LIMITED', '1C666D': 'Hon Hai Precision Ind. 
Co.,Ltd.', 'FC6947': 'Texas Instruments', 'E07DEA': 'Texas Instruments', '3CEAF9': 'JUBIXCOLTD', '682C7B': 'Cisco Systems, Inc', '441E98': 'Ruckus Wireless', 'DC2919': 'AltoBeam (Xiamen) Technology Ltd, Co.', '645AED': 'Apple, Inc.', '84F3EB': 'Espressif Inc.', 'DCDE4F': 'Gionee Communication Equipment Co Ltd ', '4CD0CB': 'HUAWEI TECHNOLOGIES CO.,LTD', '505DAC': 'HUAWEI TECHNOLOGIES CO.,LTD', '04D3B0': 'Intel Corporate', '50BC96': 'Apple, Inc.', 'FC2A9C': 'Apple, Inc.', 'A056F3': 'Apple, Inc.', 'C0B658': 'Apple, Inc.', '48A91C': 'Apple, Inc.', 'B0A37E': 'QING DAO HAIER TELECOM CO.,LTD.', '001B48': 'Shenzhen Lantech Electronics Co., Ltd.', '64C3D6': 'Juniper Networks', '885FE8': 'IEEE Registration Authority', '00250C': 'Senet Inc', '0C8063': 'TP-LINK TECHNOLOGIES CO.,LTD.', '007278': 'Cisco Systems, Inc', '00A021': 'General Dynamics Mission Systems', '002FD9': 'Fiberhome Telecommunication Technologies Co.,LTD', 'B4CD27': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C819F7': 'Samsung Electronics Co.,Ltd', '549963': 'Apple, Inc.', '90DD5D': 'Apple, Inc.', 'F0AF50': 'Phantom Intelligence', '0C08B4': 'HUMAX Co., Ltd.', '3880DF': 'Motorola Mobility LLC, a Lenovo Company', 'BC6A2F': 'Henge Docks LLC', '002705': 'Sectronic', '48BD3D': 'New H3C Technologies Co., Ltd', 'F4E11E': 'Texas Instruments', '184C08': 'Rockwell Automation', '180F76': 'D-Link International', 'DC0265': 'Meditech Kft', 'C42C4F': 'Qingdao Hisense Mobile Communication Technology Co,Ltd', '24CACB': 'Fiberhome Telecommunication Technologies Co.,LTD', '543E64': 'Fiberhome Telecommunication Technologies Co.,LTD', '6402CB': 'ARRIS Group, Inc.', '909497': 'HUAWEI TECHNOLOGIES CO.,LTD', 'DC729B': 'HUAWEI TECHNOLOGIES CO.,LTD', '34029B': 'Plexonics Technologies LImited', '84DB9E': 'Aifloo AB', 'A4DA22': 'IEEE Registration Authority', '900372': 'Longnan Junya Digital Technology Co. Ltd. ', '74E182': 'Texas Instruments', '14A72B': 'currentoptronics Pvt.Ltd', '3C1710': 'Sagemcom Broadband SAS', '8C4CAD': 'Evoluzn Inc.', '4C776D': 'Cisco Systems, Inc', '3CDCBC': 'Samsung Electronics Co.,Ltd', '804E70': 'Samsung Electronics Co.,Ltd', 'D4E6B7': 'Samsung Electronics Co.,Ltd', '8CF957': 'RuiXingHengFang Network (Shenzhen) Co.,Ltd', '0CC6CC': 'HUAWEI TECHNOLOGIES CO.,LTD', '94FE9D': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '6CB6CA': 'DIVUS GmbH', '6CC4D5': 'HMD Global Oy', '80C548': 'Shenzhen Zowee Technology Co.,Ltd', '10C25A': 'Technicolor CH USA Inc.', '785860': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E8ABF3': 'HUAWEI TECHNOLOGIES CO.,LTD', '449EF9': 'vivo Mobile Communication Co., Ltd.', '3499D7': 'Universal Flow Monitors, Inc.', '0C8BD3': 'ITEL MOBILE LIMITED', 'E8DEFB': 'MESOTIC SAS', 'C400AD': 'Advantech Technology (CHINA) Co., Ltd.', '04D13A': 'Xiaomi Communications Co Ltd', '282C02': 'IEEE Registration Authority', '583BD9': 'Fiberhome Telecommunication Technologies Co.,LTD', 'DCA266': 'Hon Hai Precision Ind. Co.,Ltd.', '0024AF': 'Dish Technologies Corp', 'C0A8F0': 'Adamson Systems Engineering', '9C431E': 'IEEE Registration Authority', '4CC206': 'Somfy', '040973': 'Hewlett Packard Enterprise', '347C25': 'Apple, Inc.', 'CC2DB7': 'Apple, Inc.', 'C48466': 'Apple, Inc.', 'FCA183': 'Amazon Technologies Inc.', '6C2ACB': 'Paxton Access Ltd', 'D8B122': 'Juniper Networks', '74EACB': 'New H3C Technologies Co., Ltd', '980074': 'Raisecom Technology CO., LTD', '18C19D': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '0C9838': 'Xiaomi Communications Co Ltd', 'A0BDCD': 'BSkyB Ltd', 'B4C799': 'Extreme Networks, Inc.', '44AD19': 'XINGFEI (H.K)LIMITED ', '000F9B': 'Ross Video Limited', '0024BA': 'Texas Instruments', '60512C': 'TCT mobile ltd', '64DB81': 'Syszone Co., Ltd.', '5C865C': 'Samsung Electronics Co.,Ltd', '04F128': 'HMD Global Oy', '38ADBE': 'New H3C Technologies Co., Ltd', '04B167': 'Xiaomi Communications Co Ltd', 'BC91B5': 'Infinix mobility limited', 'D41A3F': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '30C507': 'ECI Telecom Ltd.', '282FC2': 'Automotive Data Solutions', '646E6C': 'Radio Datacom LLC', 'E88E60': 'NSD Corporation', 'D007CA': 'Juniper Networks', '3817C3': 'Hewlett Packard Enterprise', '0C5203': 'AGM GROUP LIMITED', '2C5491': 'Microsoft Corporation', 'C8458F': 'Wyler AG', '001BB9': 'Elitegroup Computer Systems Co.,Ltd.', '002461': 'Shin Wang Tech.', 'E4A7A0': 'Intel Corporate', 'EC8350': 'Microsoft Corporation', '5CAD76': 'Shenzhen TCL New Technology Co., Ltd', '1CDF52': 'Texas Instruments', '001E1D': 'East Coast Datacom, Inc.', '38D7CA': '7HUGS LABS', '5CE28C': 'Zyxel Communications Corporation', 'E4BD4B': 'zte corporation', '7846C4': 'DAEHAP HYPER-TECH', '000144': 'Dell EMC', '08001B': 'Dell EMC', '7C010A': 'Texas Instruments', 'E42B34': 'Apple, Inc.', '3C2EF9': 'Apple, Inc.', 'A04EA7': 'Apple, Inc.', 'F0989D': 'Apple, Inc.', 'FCD6BD': 'Robert Bosch GmbH', '701F53': 'Cisco Systems, Inc', '18396E': 'SUNSEA TELECOMMUNICATIONS CO.,LTD.', 'EC7D11': 'vivo Mobile Communication Co., Ltd.', '480EEC': 'TP-LINK TECHNOLOGIES CO.,LTD.', '503EAA': 'TP-LINK TECHNOLOGIES CO.,LTD.', '5800BB': 'Juniper Networks', 'D80831': 'Samsung Electronics Co.,Ltd', '9441C1': 'Mini-Cam Limited', '48BA4E': 'Hewlett Packard', '683C7D': 'Magic Intelligence Technology Limited', '74BBD3': 'Shenzhen xeme Communication Co., Ltd.', '500F80': 'Cisco Systems, Inc', '504EDC': 'Ping Communication', '08674E': 'Hisense broadband multimedia technology Co.,Ltd', '10F1F2': 'LG Electronics (Mobile Communications)', '8C68C8': 'zte corporation', '0C1C20': 'Kakao Corp', '24F5A2': 'Belkin International Inc.', '94282E': 'New H3C Technologies Co., Ltd', '4CBD8F': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', 'ECEBB8': 'Hewlett Packard Enterprise', 'EC8263': 'zte corporation', '6CB749': 'HUAWEI TECHNOLOGIES CO.,LTD', '989C57': 'HUAWEI TECHNOLOGIES CO.,LTD', '185282': 'Fiberhome Telecommunication Technologies Co.,LTD', '8C3C4A': 'NAKAYO Inc', '7086C1': 'Texas Instruments', '98F5A9': 'OHSUNG', 'D86162': 'Wistron Neweb Corporation', 'F4D7B2': 'LGS Innovations, LLC', '00152A': 'Nokia Corporation', 'D89EF3': 'Dell Inc.', '384F49': 'Juniper Networks', '28840E': 'silicon valley immigration service ', '80615F': 'Beijing Sinead Technology Co., Ltd. ', '9C4A7B': 'Nokia Corporation', '2CD2E7': 'Nokia Corporation', '386EA2': 'vivo Mobile Communication Co., Ltd.', '982D68': 'Samsung Electronics Co., Ltd', 'BC2E48': 'ARRIS Group, Inc.', '608CE6': 'ARRIS Group, Inc.', '080070': 'Mitsubishi Precision Co.,LTd.', '444AB0': 'Zhejiang Moorgen Intelligence Technology Co., Ltd', '48EC5B': 'Nokia', 'DC6AEA': 'Infinix mobility limited', 'C421C8': 'KYOCERA CORPORATION ', '80739F': 'KYOCERA CORPORATION ', 'D8B12A': 'Panasonic Mobile Communications Co.,Ltd.', '705812': 'Panasonic Corporation AVC Networks Company', '04209A': 'Panasonic Corporation AVC Networks Company', '34008A': 'IEEE Registration Authority', 'A41115': 'Robert Bosch Engineering and Business Solutions pvt. 
Ltd.', '40D63C': 'Equitech Industrial(DongGuan)Co.,Ltd', 'F4F3AA': 'JBL GmbH & Co. KG', '40A3CC': 'Intel Corporate', '9050CA': 'Hitron Technologies. Inc', '409922': 'AzureWave Technology Inc.', 'C06D1A': 'Tianjin Henxinhuifeng Technology Co.,Ltd.', '107B44': 'ASUSTek COMPUTER INC.', '84253F': 'silex technology, Inc.', '0008C9': 'TechniSat Digital GmbH Daun', 'B019C6': 'Apple, Inc.', '3866F0': 'Apple, Inc.', '008009': 'JUPITER SYSTEMS, INC.', '00C064': 'General Datacomm LLC', '1CAB34': 'New H3C Technologies Co., Ltd', '3C7843': 'HUAWEI TECHNOLOGIES CO.,LTD', '5C0979': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E4FB5D': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E86819': 'HUAWEI TECHNOLOGIES CO.,LTD', '0001CC': 'Japan Total Design Communication Co., Ltd.', '0030C8': 'GAD LINE, LTD.', '0016E0': '3Com Ltd', 'D8DECE': 'ISUNG CO.,LTD', '801934': 'Intel Corporate', '703EAC': 'Apple, Inc.', '0011C0': 'Aday Technology Inc', '0005F1': 'Vrcom, Inc.', 'AC512C': 'Infinix mobility limited', '309935': 'zte corporation', '0C72D9': 'zte corporation', '1062D0': 'Technicolor CH USA Inc.', '50642B': 'XIAOMI Electronics,CO.,LTD', '28401A': 'C8 MediSensors, Inc.', '30C01B': 'Shenzhen Jingxun Software Telecommunication Technology Co.,Ltd', '8886C2': 'STABILO International GmbH', '08A8A1': 'Cyclotronics Power Concepts, Inc', 'F4B520': 'Biostar Microtech international corp.', 'CC2F71': 'Intel Corporate', '001CC5': '3Com Ltd', '887A31': 'Velankani Electronics Pvt. Ltd.', '8C0F6F': 'PEGATRON CORPORATION', '8C2505': 'HUAWEI TECHNOLOGIES CO.,LTD', '8C3BAD': 'NETGEAR', '64CFD9': 'Texas Instruments', '6432A8': 'Intel Corporate', '747D24': 'Phicomm (Shanghai) Co., Ltd.', 'D09466': 'Dell Inc.', 'B8F8BE': 'BLUECOM', 'A47B9D': 'Espressif Inc.', '7C2EDD': 'Samsung Electronics Co.,Ltd', '3CF7A4': 'Samsung Electronics Co.,Ltd', '38E595': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', 'BC9680': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '2C5D93': 'Ruckus Wireless', '38FF36': 'Ruckus Wireless', '84183A': 'Ruckus Wireless', '24C9A1': 'Ruckus Wireless', '002482': 'Ruckus Wireless', '689234': 'Ruckus Wireless', '50A733': 'Ruckus Wireless', '103034': 'Cara Systems', '0000FE': 'Annapolis Micro Systems, Inc.', '00D01F': 'Senetas Corporation Ltd', 'E0CBBC': 'Cisco Meraki', '6447E0': 'Feitian Technologies Co., Ltd', 'B44F96': 'Zhejiang Xinzailing Technology co., ltd', '4C65A8': 'IEEE Registration Authority', 'D822F4': 'Avnet Silica', 'B0DFC1': 'Tenda Technology Co.,Ltd.Dongguan branch', '9C6F52': 'zte corporation', 'E86D65': 'AUDIO MOBIL Elektronik GmbH', '706E6D': 'Cisco Systems, Inc', '604762': 'Beijing Sensoro Technology Co.,Ltd.', '986F60': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'BC1C81': 'Sichuan iLink Technology Co., Ltd.', '900A1A': 'Taicang T&W Electronics', '506E92': 'Innocent Technology Co., Ltd.', '30FE31': 'Nokia', '98F2B3': 'Hewlett Packard Enterprise', '348F27': 'Ruckus Wireless', '2C9EEC': 'Jabil Circuit Penang', 'C4571F': 'June Life Inc', '886AE3': 'Alpha Networks Inc.', '1C4D70': 'Intel Corporate', 'E8E1E1': 'Gemtek Technology Co., Ltd.', '28070D': 'GUANGZHOU WINSOUND INFORMATION TECHNOLOGY CO.,LTD.', '6CB227': 'Sony Video & Sound Products Inc.', '000CAB': 'Commend International GmbH', '001CFA': 'Alarm.com', '60313B': 'Sunnovo International Limited', '00A3D1': 'Cisco Systems, Inc', 'C0A5DD': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '745427': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '60720B': 'BLU Products Inc', '308976': 'DALIAN LAMBA TECHNOLOGY CO.,LTD', '2C2617': 'Oculus VR, LLC', '34D954': 'WiBotic Inc.', '4857DD': 
'Facebook Inc', '487D2E': 'TP-LINK TECHNOLOGIES CO.,LTD.', '488D36': 'Arcadyan Corporation', 'BCD713': 'Owl Labs', 'FC4D8C': 'SHENZHEN PANTE ELECTRONICS TECHNOLOGY CO., LTD', 'FC06ED': 'M2Motive Technology Inc.', 'F0D4F6': 'Lars Thrane A/S', 'F4A997': 'CANON INC.', 'B0DAF9': 'ARRIS Group, Inc.', '1835D1': 'ARRIS Group, Inc.', '64DFE9': 'ATEME', '10C6FC': 'Garmin International', 'AC2205': 'Compal Broadband Networks, Inc.', '80A036': 'Shanghai MXCHIP Information Technology Co., Ltd.', 'B43934': 'Pen Generations, Inc.', '7426AC': 'Cisco Systems, Inc', 'F07485': 'NGD Systems, Inc.', '509A4C': 'Dell Inc.', '20F452': 'Shanghai IUV Software Development Co. Ltd', '405CFD': 'Dell Inc.', 'A0094C': 'CenturyLink', 'B02628': 'Broadcom Limited', '9874DA': 'Infinix mobility limited', '40B4F0': 'Juniper Networks', '1C5A0B': 'Tegile Systems', '046E02': 'OpenRTLS Group', '900E83': 'Monico Monitoring, Inc.', '601466': 'zte corporation', '143F27': 'Noccela Oy', '105887': 'Fiberhome Telecommunication Technologies Co.,LTD', '704CA5': 'Fortinet, Inc.', '9C061B': 'Hangzhou H3C Technologies Co., Limited', '50338B': 'Texas Instruments', 'E037BF': 'Wistron Neweb Corporation', 'E81367': 'AIRSOUND Inc.', '68262A': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', 'E8DE8E': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'D8D866': 'SHENZHEN TOZED TECHNOLOGIES CO.,LTD.', 'D8C06A': 'Hunantv.com Interactive Entertainment Media Co.,Ltd.', 'AC202E': 'Hitron Technologies. Inc', '9C32A9': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '6854ED': 'Alcatel-Lucent', '680235': 'Konten Networks Inc.', '38AC3D': 'Nephos Inc', '001192': 'Cisco Systems, Inc', '38F135': 'SensorTec-Canada', 'C49DED': 'Microsoft Corporation', '98A40E': 'Snap, Inc.', 'F40343': 'Hewlett Packard Enterprise', 'AC7409': 'Hangzhou H3C Technologies Co., Limited', 'F093C5': 'Garland Technology', '9810E8': 'Apple, Inc.', 'C0D012': 'Apple, Inc.', 'BCA920': 'Apple, Inc.', '48A195': 'Apple, Inc.', 'F80377': 'Apple, Inc.', '8058F8': 'Motorola Mobility LLC, a Lenovo Company', 'F49634': 'Intel Corporate', '107D1A': 'Dell Inc.', '70AF24': 'TP Vision Belgium NV', 'A41163': 'IEEE Registration Authority', 'E8D11B': 'ASKEY COMPUTER CORP', 'C4D197': 'Ventia Utility Services', '7CE97C': 'ITEL MOBILE LIMITED', '2C86D2': 'Cisco Systems, Inc', 'DCA4CA': 'Apple, Inc.', '8C8FE9': 'Apple, Inc.', '70AF25': 'Nishiyama Industry Co.,LTD.', '0C5F35': 'Niagara Video Corporation', 'B8224F': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '9800C1': 'GuangZhou CREATOR Technology Co.,Ltd.(CHINA)', '903D6B': 'Zicon Technology Corp.', '54E1AD': 'LCFC(HeFei) Electronics Technology co., ltd', 'B0C46C': 'Senseit', '98D3D2': 'MEKRA Lang GmbH & Co. KG', 'CCBE59': 'Calix Inc.', '001912': 'Welcat Inc', '8C78D7': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', 'B8EAAA': 'ICG NETWORKS CO.,ltd', '5CBA37': 'Microsoft Corporation', '00C0C6': 'PERSONAL MEDIA CORP.', 'B8F883': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'DCFE18': 'TP-LINK TECHNOLOGIES CO.,LTD.', '704F57': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F8A34F': 'zte corporation', 'C87324': ' Sow Cheng Technology Co. 
Ltd.', '0002A1': 'World Wide Packets', '00E022': 'Analog Devices, Inc.', '14B7F8': 'Technicolor CH USA Inc.', '908674': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '50E666': 'Shenzhen Techtion Electronics Co., Ltd.', '3C7F6F': 'Telechips, Inc.', 'B04089': 'Senient Systems LTD', '5425EA': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C894BB': 'HUAWEI TECHNOLOGIES CO.,LTD', '10B1F8': 'HUAWEI TECHNOLOGIES CO.,LTD', '089E08': 'Google, Inc.', '28FECD': 'Lemobile Information Technology (Beijing) Co., Ltd.', '4C4E03': 'TCT mobile ltd', '00210D': 'SAMSIN INNOTEC', '0016D3': 'Wistron Corporation', '001F16': 'Wistron Corporation', '00262D': 'Wistron Corporation', '0495E6': 'Tenda Technology Co.,Ltd.Dongguan branch', '901711': 'Hagenuk Marinekommunikation GmbH', '0010DE': 'INTERNATIONAL DATACASTING CORPORATION', 'B0B98A': 'NETGEAR', '805A04': 'LG Electronics (Mobile Communications)', 'C0D9F7': 'ShanDong Domor Intelligent S&T CO.,Ltd', '00608B': 'ConferTech International', '702D84': 'i4C Innovations', '2C200B': 'Apple, Inc.', '8866A5': 'Apple, Inc.', '000277': 'Cash Systemes Industrie', 'CCA219': 'SHENZHEN ALONG INVESTMENT CO.,LTD', '4C1A3A': 'PRIMA Research And Production Enterprise Ltd.', '14B31F': 'Dell Inc.', '000048': 'Seiko Epson Corporation', 'B0E892': 'Seiko Epson Corporation', 'AC1826': 'Seiko Epson Corporation', 'A4EE57': 'Seiko Epson Corporation', '9CAED3': 'Seiko Epson Corporation', '707C69': 'Avaya Inc', 'ACC1EE': 'Xiaomi Communications Co Ltd', '5419C8': 'vivo Mobile Communication Co., Ltd.', '8CA5A1': 'Oregano Systems - Design & Consulting GmbH', 'B8ECA3': 'Zyxel Communications Corporation', '38BC01': 'HUAWEI TECHNOLOGIES CO.,LTD', '341E6B': 'HUAWEI TECHNOLOGIES CO.,LTD', '886639': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BC8385': 'Microsoft Corporation', '001438': 'Hewlett Packard Enterprise', 'E4B005': 'Beijing IQIYI Science & Technology Co., Ltd.', 'B05216': 'Hon Hai Precision Ind. Co.,Ltd.', '500B91': 'IEEE Registration Authority', 'F8461C': 'Sony Interactive Entertainment Inc.', '704D7B': 'ASUSTek COMPUTER INC.', 'A0E4CB': 'Zyxel Communications Corporation', '284ED7': 'OutSmart Power Systems, Inc.', '64A68F': 'Zhongshan Readboy Electronics Co.,Ltd', '00425A': 'Cisco Systems, Inc', '18DBF2': 'Dell Inc.', '18F87A': 'i3 International Inc.', '4C26E7': 'Welgate Co., Ltd.', '006041': 'Yokogawa Digital Computer Corporation', '14A78B': 'Zhejiang Dahua Technology Co., Ltd.', '00C05A': 'SEMAPHORE COMMUNICATIONS CORP.', '0007F9': 'Sensaphone', '001CB3': 'Apple, Inc.', '686975': 'Angler Labs Inc', '20D25F': 'SmartCap Technologies', 'E47DBD': 'Samsung Electronics Co.,Ltd', '48D343': 'ARRIS Group, Inc.', 'A0B8F8': 'Amgen U.S.A. 
Inc.', '884477': 'HUAWEI TECHNOLOGIES CO.,LTD', '149D09': 'HUAWEI TECHNOLOGIES CO.,LTD', '4C11BF': 'Zhejiang Dahua Technology Co., Ltd.', '2C598A': 'LG Electronics (Mobile Communications)', 'C816A5': 'Masimo Corporation', '0C0227': 'Technicolor CH USA Inc.', '9CFBD5': 'vivo Mobile Communication Co., Ltd.', '18F76B': 'Zhejiang Winsight Technology CO.,LTD', '583112': 'DRUST', '9C83BF': 'PRO-VISION, Inc.', '78EF4C': 'Unetconvergence Co., Ltd.', '58696C': 'Ruijie Networks Co.,LTD', 'E07C13': 'zte corporation', 'F41F88': 'zte corporation', '407183': 'Juniper Networks', 'C81B5C': 'BCTech', '5CE30E': 'ARRIS Group, Inc.', '1CC0E1': 'IEEE Registration Authority', '905C44': 'Compal Broadband Networks, Inc.', '44BA46': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '001A39': 'Merten GmbH&CoKG', 'FCECDA': 'Ubiquiti Networks Inc.', '14EDBB': '2Wire Inc', '687251': 'Ubiquiti Networks Inc.', 'B4FBE4': 'Ubiquiti Networks Inc.', '00B0E1': 'Cisco Systems, Inc', '005093': 'BOEING', 'F07960': 'Apple, Inc.', 'E43ED7': 'Arcadyan Corporation', 'A0D795': 'Apple, Inc.', '0090E7': 'HORSCH ELEKTRONIK AG', '28EE52': 'TP-LINK TECHNOLOGIES CO.,LTD.', '18E29F': 'vivo Mobile Communication Co., Ltd.', 'A04E01': 'CENTRAL ENGINEERING co.,ltd.', '28CA09': 'ThyssenKrupp Elevators (Shanghai) Co.,Ltd', '3C8BCD': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '2047ED': 'BSkyB Ltd', '00F22C': 'Shanghai B-star Technology Co.,Ltd.', '842519': 'Samsung Electronics', '5C2443': 'O-Sung Telecom Co., Ltd.', '24920E': 'Samsung Electronics Co.,Ltd', 'FC4203': 'Samsung Electronics Co.,Ltd', 'A01081': 'Samsung Electronics Co.,Ltd', '4CF95D': 'HUAWEI TECHNOLOGIES CO.,LTD', '8421F1': 'HUAWEI TECHNOLOGIES CO.,LTD', '707990': 'HUAWEI TECHNOLOGIES CO.,LTD', '0005EE': 'Vanderbilt International (SWE) AB ', '5454CF': 'PROBEDIGITAL CO.,LTD', 'F0D5BF': 'Intel Corporate', 'C09F05': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '5C4979': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', 'F8633F': 'Intel Corporate', '088620': 'TECNO MOBILE LIMITED', '981333': 'zte corporation', '6474F6': 'Shooter Detection Systems', 'BC4760': 'Samsung Electronics Co.,Ltd', '748A69': 'Korea Image Technology Co., Ltd', 'D0DB32': 'Nokia Corporation', 'E80036': 'Befs co,. ltd', '04180F': 'Samsung Electronics Co.,Ltd', '2013E0': 'Samsung Electronics Co.,Ltd', '002566': 'Samsung Electronics Co.,Ltd', 'B88EDF': 'Zencheer Communication Technology Co., Ltd.', 'C0F945': 'Toshiba Toko Meter Systems Co., LTD.', '70F8E7': 'IEEE Registration Authority', '002485': 'ConteXtream Ltd', '28FCF6': 'Shenzhen Xin KingBrand enterprises Co.,Ltd', '689423': 'Hon Hai Precision Ind. Co.,Ltd.', '844BF5': 'Hon Hai Precision Ind. Co.,Ltd.', '08EDB9': 'Hon Hai Precision Ind. Co.,Ltd.', '3C77E6': 'Hon Hai Precision Ind. Co.,Ltd.', '70188B': 'Hon Hai Precision Ind. Co.,Ltd.', '001F58': 'EMH Energiemesstechnik GmbH', '0016DF': 'Lundinova AB', '001D0C': 'MobileCompia', 'DC7144': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '980C82': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '5C6D20': 'Hon Hai Precision Ind. Co.,Ltd.', '5CAC4C': 'Hon Hai Precision Ind. 
Co.,Ltd.', 'D42C44': 'Cisco Systems, Inc', '843DC6': 'Cisco Systems, Inc', 'A00BBA': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '606BBD': 'Samsung Electronics Co.,Ltd', '00214C': 'Samsung Electronics Co.,Ltd', '00166B': 'Samsung Electronics Co.,Ltd', '8018A7': 'Samsung Electronics Co.,Ltd', 'F47B5E': 'Samsung Electronics Co.,Ltd', '08C6B3': 'QTECH LLC', 'D487D8': 'Samsung Electronics Co.,Ltd', '184617': 'Samsung Electronics Co.,Ltd', '380A94': 'Samsung Electronics Co.,Ltd', 'D0DFC7': 'Samsung Electronics Co.,Ltd', '0000F0': 'Samsung Electronics Co.,Ltd', '8CC8CD': 'Samsung Electronics Co.,Ltd', 'D0C1B1': 'Samsung Electronics Co.,Ltd', '70F927': 'Samsung Electronics Co.,Ltd', 'F0728C': 'Samsung Electronics Co.,Ltd', '34AA8B': 'Samsung Electronics Co.,Ltd', 'BC4486': 'Samsung Electronics Co.,Ltd', '20D390': 'Samsung Electronics Co.,Ltd', '0018AF': 'Samsung Electronics Co.,Ltd', '001EE1': 'Samsung Electronics Co.,Ltd', 'A8F274': 'Samsung Electronics Co.,Ltd', '6C1E90': 'Hansol Technics Co., Ltd.', 'C8DE51': ' IntegraOptics', '34E71C': 'Shenzhen YOUHUA Technology Co., Ltd', '886AB1': 'vivo Mobile Communication Co., Ltd.', '24DBED': 'Samsung Electronics Co.,Ltd', 'C45006': 'Samsung Electronics Co.,Ltd', '88329B': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '9401C2': 'Samsung Electronics Co.,Ltd', '50FC9F': 'Samsung Electronics Co.,Ltd', '380B40': 'Samsung Electronics Co.,Ltd', '1449E0': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'D02544': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '005A13': 'HUAWEI TECHNOLOGIES CO.,LTD', '946124': 'Pason Systems', 'B8FF61': 'Apple, Inc.', '00177E': 'Meshcom Technologies Inc.', '68C44D': 'Motorola Mobility LLC, a Lenovo Company', 'C4F5A5': 'Kumalift Co., Ltd.', '38D269': 'Texas Instruments', 'C8FD19': 'Texas Instruments', '508CB1': 'Texas Instruments', '98F058': 'Lynxspring, Incl.', '9884E3': 'Texas Instruments', 'A00460': 'NETGEAR', '70B14E': 'ARRIS Group, Inc.', '304487': 'Hefei Radio Communication Technology Co., Ltd ', '943DC9': 'Asahi Net, Inc.', '0081C4': 'Cisco Systems, Inc', '440444': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '400D10': 'ARRIS Group, Inc.', '58E876': 'IEEE Registration Authority', '2C9D1E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D03742': 'Yulong Computer Telecommunication Scientific (Shenzhen) Co.,Ltd', 'F8E61A': 'Samsung Electronics Co.,Ltd', '84B541': 'Samsung Electronics Co.,Ltd', '006F64': 'Samsung Electronics Co.,Ltd', 'DC6672': 'Samsung Electronics Co.,Ltd', 'EC8EB5': 'Hewlett Packard', '70AF6A': 'SHENZHEN FENGLIAN TECHNOLOGY CO., LTD.', 'E0DDC0': 'vivo Mobile Communication Co., Ltd.', '00233E': 'Alcatel-Lucent IPD', '6CBEE9': 'Alcatel-Lucent IPD', '00164D': 'Alcatel-Lucent IPD', '001AF0': 'Alcatel-Lucent IPD', '38521A': 'Nokia', '20F543': 'Hui Zhou Gaoshengda Technology Co.,LTD', '0015E8': 'Nortel Networks', '00159B': 'Nortel Networks', '001540': 'Nortel Networks', '001ECA': 'Nortel Networks', '00130A': 'Nortel Networks', '001F0A': 'Nortel Networks', 'AC9CE4': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '001765': 'Nortel Networks', '000F06': 'Nortel Networks', '20F41B': 'Shenzhen Bilian electronic CO.,LTD', '001E40': 'Shanghai DareGlobal Technologies Co.,Ltd', '94D723': 'Shanghai DareGlobal Technologies Co.,Ltd', 'FCFAF7': 'Shanghai Baud Data Communication Co.,Ltd.', 'D826B9': 'Guangdong Coagent Electronics S&T Co.,Ltd.', '6CA858': 'Fiberhome Telecommunication Technologies Co.,LTD', 'A84E3F': 'Hitron Technologies. 
Inc', 'CC2D8C': 'LG ELECTRONICS INC', '0C4885': 'LG Electronics (Mobile Communications)', 'C4047B': 'Shenzhen YOUHUA Technology Co., Ltd', 'A091C8': 'zte corporation', 'B437D1': 'IEEE Registration Authority', '2C6A6F': 'IEEE Registration Authority', '40667A': 'mediola - connected living AG', '9C2A83': 'Samsung Electronics Co.,Ltd', 'C80210': 'LG Innotek', '08D833': 'Shenzhen RF Technology Co., Ltd', 'A46032': 'MRV Communications (Networks) LTD', 'E892A4': 'LG Electronics (Mobile Communications)', '10683F': 'LG Electronics (Mobile Communications)', '40B0FA': 'LG Electronics (Mobile Communications)', 'A039F7': 'LG Electronics (Mobile Communications)', '0022A9': 'LG Electronics (Mobile Communications)', '0025E5': 'LG Electronics (Mobile Communications)', '0021FB': 'LG Electronics (Mobile Communications)', '34FCEF': 'LG Electronics (Mobile Communications)', 'BCF5AC': 'LG Electronics (Mobile Communications)', '98D6F7': 'LG Electronics (Mobile Communications)', '700514': 'LG Electronics (Mobile Communications)', '1CCAE3': 'IEEE Registration Authority', 'E4956E': 'IEEE Registration Authority', '0055DA': 'IEEE Registration Authority', '78C2C0': 'IEEE Registration Authority', '00199D': 'Vizio, Inc', '001938': 'UMB Communications Co., Ltd.', '4439C4': 'Universal Global Scientific Industrial Co., Ltd.', '402CF4': 'Universal Global Scientific Industrial Co., Ltd.', '54AB3A': 'QUANTA COMPUTER INC.', '683563': 'SHENZHEN LIOWN ELECTRONICS CO.,LTD.', 'E89A8F': 'QUANTA COMPUTER INC.', '004072': 'Applied Innovation Inc.', '00E08B': 'QLogic Corporation', 'D8EB97': 'TRENDnet, Inc.', '000E5C': 'ARRIS Group, Inc.', '845DD7': 'Shenzhen Netcom Electronics Co.,Ltd', '00B064': 'Cisco Systems, Inc', '68A0F6': 'HUAWEI TECHNOLOGIES CO.,LTD', '001C7E': 'Toshiba', '002318': 'Toshiba', 'B86B23': 'Toshiba', '0008F1': 'Voltaire', '000EE8': 'Zioncom Electronics (Shenzhen) Ltd.', '00C095': 'ZNYX Networks, Inc.', '002025': 'CONTROL TECHNOLOGY, INC.', '000B6B': 'Wistron Neweb Corporation', '6002B4': 'Wistron Neweb Corporation', '94DF4E': 'Wistron InfoComm(Kunshan)Co.,Ltd.', '98EECB': 'Wistron Infocomm (Zhongshan) Corporation', '001E37': 'Universal Global Scientific Industrial Co., Ltd.', '001A6B': 'Universal Global Scientific Industrial Co., Ltd.', '001641': 'Universal Global Scientific Industrial Co., Ltd.', '0010C6': 'Universal Global Scientific Industrial Co., Ltd.', '00247E': 'Universal Global Scientific Industrial Co., Ltd.', '00DD0A': 'UNGERMANN-BASS INC.', '1C57D8': 'Kraftway Corporation PLC', '643AB1': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '8048A5': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '683E34': 'MEIZU Technology Co., Ltd.', '002517': 'Venntis, LLC', '00600F': 'Westell Technologies Inc.', '00183A': 'Westell Technologies Inc.', '446EE5': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C8778B': 'Mercury Systems – Trusted Mission Solutions, Inc. 
', '00044B': 'NVIDIA', 'AC9B0A': 'Sony Corporation', '104FA8': 'Sony Corporation', '48FCB6': 'LAVA INTERNATIONAL(H.K) LIMITED', 'B0E235': 'Xiaomi Communications Co Ltd', '40C729': 'Sagemcom Broadband SAS', '14C913': 'LG Electronics', 'AC040B': 'Peloton Interactive, Inc', '5067F0': 'Zyxel Communications Corporation', '001349': 'Zyxel Communications Corporation', 'D8E0B8': 'BULAT LLC', '603197': 'Zyxel Communications Corporation', 'F8A097': 'ARRIS Group, Inc.', 'FC2325': 'EosTek (Shenzhen) Co., Ltd.', '789CE7': 'Shenzhen Aikede Technology Co., Ltd', '509F3B': 'OI ELECTRIC CO.,LTD', 'FC3D93': 'LONGCHEER TELECOMMUNICATION LIMITED', '00F663': 'Cisco Systems, Inc', 'C0C976': 'Shenzhen TINNO Mobile Technology Corp.', '588BF3': 'Zyxel Communications Corporation', '001BFE': 'Zavio Inc.', '5410EC': 'Microchip Technology Inc.', '0004A3': 'Microchip Technology Inc.', '88F7C7': 'Technicolor CH USA Inc.', 'A06090': 'Samsung Electronics Co.,Ltd', 'BC765E': 'Samsung Electronics Co.,Ltd', 'E0A8B8': 'Le Shi Zhi Xin Electronic Technology (Tianjin) Limited', 'F45B73': 'Wanjiaan Interconnected Technology Co., Ltd', '2CDDA3': 'Point Grey Research Inc.', '00809F': 'ALE International', 'B824F0': 'SOYO Technology Development Co., Ltd.', 'D85B2A': 'Samsung Electronics Co.,Ltd', 'A0B437': ' GD Mission Systems', 'E09861': 'Motorola Mobility LLC, a Lenovo Company', '9C8ECD': 'Amcrest Technologies', 'B88198': 'Intel Corporate', 'A009ED': 'Avaya Inc', '0014B4': 'General Dynamics United Kingdom Ltd', '8C59C3': 'ADB Italia ', '000BDE': 'TELDIX GmbH', 'FCA89A': 'Sunitec Enterprise Co.,Ltd', '1C7B23': 'Qingdao Hisense Communications Co.,Ltd.', 'B0D7CC': 'Tridonic GmbH & Co KG', '8C6D50': 'SHENZHEN MTC CO LTD', '005F86': 'Cisco Systems, Inc', '381DD9': 'FN-LINK TECHNOLOGY LIMITED', '1CB9C4': 'Ruckus Wireless', 'C83DFC': 'Pioneer DJ Corporation', 'CCD31E': 'IEEE Registration Authority', '1C6E76': 'Quarion Technology Inc', '34B354': 'HUAWEI TECHNOLOGIES CO.,LTD', '6C0EE6': 'Chengdu Xiyida Electronic Technology Co,.Ltd', '000763': 'Sunniwell Cyber Tech. Co., Ltd.', '0062EC': 'Cisco Systems, Inc', 'CC167E': 'Cisco Systems, Inc', '48C663': 'GTO Access Systems LLC', 'C02FF1': 'Volta Networks', 'E8A7F2': 'sTraffic', '001F20': 'Logitech Europe SA', '74258A': 'Hangzhou H3C Technologies Co., Limited', '741F4A': 'Hangzhou H3C Technologies Co., Limited', 'CC500A': 'Fiberhome Telecommunication Technologies Co.,LTD', 'D046DC': 'Southwest Research Institute', 'C46AB7': 'Xiaomi Communications Co Ltd', 'E41D2D': 'Mellanox Technologies, Inc.', '240A11': 'TCT mobile ltd', 'D8E56D': 'TCT mobile ltd', '90C1C6': 'Apple, Inc.', '000AED': 'HARTING Electronics GmbH', '0CDA41': 'Hangzhou H3C Technologies Co., Limited', '70A2B3': 'Apple, Inc.', 'F40F24': 'Apple, Inc.', '4C57CA': 'Apple, Inc.', '540593': 'WOORI ELEC Co.,Ltd', '50680A': 'HUAWEI TECHNOLOGIES CO.,LTD', '00A006': 'IMAGE DATA PROCESSING SYSTEM GROUP', 'C83F26': 'Microsoft Corporation', '000C49': 'Dangaard Telecom Denmark A/S', '002238': 'LOGIPLUS', '3497F6': 'ASUSTek COMPUTER INC.', '0008B9': 'Kaonmedia CO., LTD.', 'A0B662': 'Acutvista Innovation Co., Ltd.', 'E42F56': 'OptoMET GmbH', 'F8DA0C': 'Hon Hai Precision Ind. 
Co.,Ltd.', '1C1B0D': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '48E9F1': 'Apple, Inc.', '903809': 'Ericsson AB', '487ADA': 'Hangzhou H3C Technologies Co., Limited', '001F45': 'Enterasys', 'A08CFD': 'Hewlett Packard', '000D87': 'Elitegroup Computer Systems Co.,Ltd.', '1078D2': 'Elitegroup Computer Systems Co.,Ltd.', '000E03': 'Emulex Corporation', '00168F': 'GN Netcom A/S', 'E4F3F5': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '60B387': 'Synergics Technologies GmbH', 'A4D8CA': 'HONG KONG WATER WORLD TECHNOLOGY CO. LIMITED', '8019FE': 'JianLing Technology CO., LTD', '60B4F7': 'Plume Design Inc', '44650D': 'Amazon Technologies Inc.', '00CAE5': 'Cisco Systems, Inc', '004268': 'Cisco Systems, Inc', '4883C7': 'Sagemcom Broadband SAS', '40163B': 'Samsung Electronics Co.,Ltd', 'FC3F7C': 'HUAWEI TECHNOLOGIES CO.,LTD', '384C4F': 'HUAWEI TECHNOLOGIES CO.,LTD', '0CBF3F': 'Shenzhen Lencotion Technology Co.,Ltd', '50FF99': 'IEEE Registration Authority', '84E323': 'Green Wave Telecommunication SDN BHD', '002197': 'Elitegroup Computer Systems Co.,Ltd.', '001E90': 'Elitegroup Computer Systems Co.,Ltd.', '0022B1': 'Elbit Systems Ltd.', '140C5B': 'PLNetworks', '0000B4': 'Edimax Technology Co. Ltd.', '48555F': 'Fiberhome Telecommunication Technologies Co.,LTD', 'BC9889': 'Fiberhome Telecommunication Technologies Co.,LTD', '24615A': 'China Mobile Group Device Co.,Ltd.', 'FC084A': 'FUJITSU LIMITED', '405EE1': 'Shenzhen H&T Intelligent Control Co.,Ltd.', '002578': 'JSC Concern Sozvezdie', 'D4F207': 'DIAODIAO(Beijing)Technology CO.,Ltd', 'D4AD2D': 'Fiberhome Telecommunication Technologies Co.,LTD', 'F08CFB': 'Fiberhome Telecommunication Technologies Co.,LTD', '001706': 'Techfaithwireless Communication Technology Limited.', '30B49E': 'TP-LINK TECHNOLOGIES CO.,LTD.', '34E70B': 'HAN Networks Co., Ltd', '007888': 'Cisco Systems, Inc', '705A9E': 'Technicolor CH USA Inc.', '04A316': 'Texas Instruments', '900325': 'HUAWEI TECHNOLOGIES CO.,LTD', '98E7F5': 'HUAWEI TECHNOLOGIES CO.,LTD', '085BDA': 'CliniCare LTD', '1CC035': 'PLANEX COMMUNICATIONS INC.', '34543C': 'TAKAOKA TOKO CO.,LTD.', '1866DA': 'Dell Inc.', '583277': 'Reliance Communications LLC', 'C83870': 'Samsung Electronics Co.,Ltd', '1C553A': 'QianGua Corp.', '9C9D5D': 'Raden Inc', 'DC4D23': 'MRV Comunications', '0023B3': 'Lyyn AB', '008E73': 'Cisco Systems, Inc', '0015C1': 'Sony Interactive Entertainment Inc.', 'A09D91': 'SoundBridge', '40B688': 'LEGIC Identsystems AG', '248A07': 'Mellanox Technologies, Inc.', '9CD48B': 'Innolux Technology Europe BV', '00351A': 'Cisco Systems, Inc', '90A62F': 'NAVER', 'C0C522': 'ARRIS Group, Inc.', '00AF1F': 'Cisco Systems, Inc', 'C0CCF8': 'Apple, Inc.', '803896': 'SHARP Corporation', '0060EC': 'HERMARY OPTO ELECTRONICS INC.', '402E28': 'MiXTelematics', '6C8FB5': 'Microsoft Mobile Oy', '1C9E46': 'Apple, Inc.', '9C4FDA': 'Apple, Inc.', '8489AD': 'Apple, Inc.', 'C4E510': 'Mechatro, Inc.', '18A6F7': 'TP-LINK TECHNOLOGIES CO.,LTD.', '00180F': 'Nokia Danmark A/S', 'C8979F': 'Nokia Corporation', 'ECF35B': 'Nokia Corporation', '0021FC': 'Nokia Danmark A/S', '001F5D': 'Nokia Danmark A/S', '001F01': 'Nokia Danmark A/S', '001BEE': 'Nokia Danmark A/S', '544408': 'Nokia Corporation', '3CC243': 'Nokia Corporation', '347E39': 'Nokia Danmark A/S', '001979': 'Nokia Danmark A/S', '0025D0': 'Nokia Danmark A/S', 'AC3A7A': 'Roku, Inc.', 'B83E59': 'Roku, Inc.', 'DC3A5E': 'Roku, Inc.', '001A73': 'Gemtek Technology Co., Ltd.', '00904B': 'Gemtek Technology Co., Ltd.', '00507F': 'DrayTek Corp.', '001A7F': 'GCI Science & Technology Co.,LTD', '0024D4': 
'FREEBOX SAS', '647791': 'Samsung Electronics Co.,Ltd', '9CE6E7': 'Samsung Electronics Co.,Ltd', '9C0298': 'Samsung Electronics Co.,Ltd', '0090A2': 'CyberTAN Technology Inc.', '0090D6': 'Crystal Group, Inc.', '28987B': 'Samsung Electronics Co.,Ltd', '0C715D': 'Samsung Electronics Co.,Ltd', '7C1CF1': 'HUAWEI TECHNOLOGIES CO.,LTD', '78F557': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E02861': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D0D04B': 'HUAWEI TECHNOLOGIES CO.,LTD', '00F46F': 'Samsung Electronics Co.,Ltd', '54FA3E': 'Samsung Electronics Co.,Ltd', '0C8910': 'Samsung Electronics Co.,Ltd', 'D476EA': 'zte corporation', 'D8C4E9': 'Samsung Electronics Co.,Ltd', 'BCD11F': 'Samsung Electronics Co.,Ltd', 'F4428F': 'Samsung Electronics Co.,Ltd', '446D6C': 'Samsung Electronics Co.,Ltd', '001018': 'Broadcom', '18C086': 'Broadcom', '78ABBB': 'Samsung Electronics Co.,Ltd', '00175A': 'Cisco Systems, Inc', 'C8FF28': 'Liteon Technology Corporation', 'B81619': 'ARRIS Group, Inc.', 'B077AC': 'ARRIS Group, Inc.', '9C80DF': 'Arcadyan Technology Corporation', '002308': 'Arcadyan Technology Corporation', '880355': 'Arcadyan Technology Corporation', '34BB1F': 'BlackBerry RTS', '480031': 'HUAWEI TECHNOLOGIES CO.,LTD', '4C09D4': 'Arcadyan Technology Corporation', 'FCB4E6': 'ASKEY COMPUTER CORP', '0896D7': 'AVM GmbH', '506A03': 'NETGEAR', 'E0469A': 'NETGEAR', '30469A': 'NETGEAR', '100D7F': 'NETGEAR', '504A6E': 'NETGEAR', '406F2A': 'BlackBerry RTS', '0015D0': 'ARRIS Group, Inc.', '001596': 'ARRIS Group, Inc.', '04E676': 'AMPAK Technology, Inc.', '00192C': 'ARRIS Group, Inc.', '00195E': 'ARRIS Group, Inc.', '001A1B': 'ARRIS Group, Inc.', '001A66': 'ARRIS Group, Inc.', '001A77': 'ARRIS Group, Inc.', '64ED57': 'ARRIS Group, Inc.', 'A4ED4E': 'ARRIS Group, Inc.', '00211E': 'ARRIS Group, Inc.', '002180': 'ARRIS Group, Inc.', '001BDD': 'ARRIS Group, Inc.', '001D6B': 'ARRIS Group, Inc.', '001DBE': 'ARRIS Group, Inc.', '0012C9': 'ARRIS Group, Inc.', '0023A2': 'ARRIS Group, Inc.', '0023ED': 'ARRIS Group, Inc.', '001B52': 'ARRIS Group, Inc.', '001E8D': 'ARRIS Group, Inc.', '20E564': 'ARRIS Group, Inc.', '90B134': 'ARRIS Group, Inc.', '40B7F3': 'ARRIS Group, Inc.', '0017E2': 'ARRIS Group, Inc.', '001675': 'ARRIS Group, Inc.', '000CE5': 'ARRIS Group, Inc.', '0003E0': 'ARRIS Group, Inc.', '002493': 'ARRIS Group, Inc.', '002641': 'ARRIS Group, Inc.', '0022F4': 'AMPAK Technology, Inc.', '001DBA': 'Sony Corporation', '0024BE': 'Sony Corporation', '000FDE': 'Sony Mobile Communications Inc', '002298': 'Sony Mobile Communications Inc', '2421AB': 'Sony Mobile Communications Inc', 'B8F934': 'Sony Mobile Communications Inc', '8C6422': 'Sony Mobile Communications Inc', '0CFE45': 'Sony Interactive Entertainment Inc.', '2016D8': 'Liteon Technology Corporation', 'E063E5': 'Sony Mobile Communications Inc', 'F8D0AC': 'Sony Interactive Entertainment Inc.', 'E8617E': 'Liteon Technology Corporation', '18CF5E': 'Liteon Technology Corporation', 'F0272D': 'Amazon Technologies Inc.', '84D6D0': 'Amazon Technologies Inc.', '001B59': 'Sony Mobile Communications Inc', '18FE34': 'Espressif Inc.', '985FD3': 'Microsoft Corporation', '00DA55': 'Cisco Systems, Inc', 'A4526F': 'ADB Broadband Italia', '74888B': 'ADB Broadband Italia', '008C54': 'ADB Broadband Italia', '00247B': 'Actiontec Electronics, Inc', '00D0C9': 'ADVANTECH CO., LTD.', '6487D7': 'ADB Broadband Italia', 'E0B2F1': 'FN-LINK TECHNOLOGY LIMITED', '0C4C39': 'MitraStar Technology Corp.', '0004E3': 'Accton Technology Corp', '0010B5': 'Accton Technology Corp', '001974': '16063', '002243': 'AzureWave Technology Inc.', 
'00006E': 'Artisoft Inc.', '38229D': 'ADB Broadband Italia', '605BB4': 'AzureWave Technology Inc.', '64D954': 'Taicang T&W Electronics', '5C36B8': 'TCL King Electrical Appliances (Huizhou) Co., Ltd', '00AA01': 'Intel Corporation', '18E3BC': 'TCT mobile ltd', 'CC1FC4': 'InVue', '685D43': 'Intel Corporate', 'A0369F': 'Intel Corporate', '64D4DA': 'Intel Corporate', '4025C2': 'Intel Corporate', '502DA2': 'Intel Corporate', '78929C': 'Intel Corporate', '00AA00': 'Intel Corporation', '00C2C6': 'Intel Corporate', '5CD2E4': 'Intel Corporate', '28B2BD': 'Intel Corporate', '843A4B': 'Intel Corporate', '5C514F': 'Intel Corporate', 'A44E31': 'Intel Corporate', '4CEB42': 'Intel Corporate', 'F81654': 'Intel Corporate', '606C66': 'Intel Corporate', '4C8093': 'Intel Corporate', 'AC7289': 'Intel Corporate', '448500': 'Intel Corporate', '0CD292': 'Intel Corporate', '4C79BA': 'Intel Corporate', '84A6C8': 'Intel Corporate', '5891CF': 'Intel Corporate', '0C8BFD': 'Intel Corporate', 'D86C02': 'Huaqin Telecom Technology Co.,Ltd', '60BEB5': 'Motorola Mobility LLC, a Lenovo Company', 'F8F1B6': 'Motorola Mobility LLC, a Lenovo Company', 'F4F1E1': 'Motorola Mobility LLC, a Lenovo Company', 'A4C494': 'Intel Corporate', '902E1C': 'Intel Corporate', 'A434D9': 'Intel Corporate', '448723': 'HOYA SERVICE CORPORATION', '9CD917': 'Motorola Mobility LLC, a Lenovo Company', '9068C3': 'Motorola Mobility LLC, a Lenovo Company', '3C197D': 'Ericsson AB', 'DCA971': 'Intel Corporate', '58946B': 'Intel Corporate', '0024D7': 'Intel Corporate', '0024D6': 'Intel Corporate', '001DE0': 'Intel Corporate', 'B4E10F': 'Dell Inc.', '002219': 'Dell Inc.', '0024E8': 'Dell Inc.', '00215C': 'Intel Corporate', '00216B': 'Intel Corporate', '0022FB': 'Intel Corporate', '001517': 'Intel Corporate', 'A0A8CD': 'Intel Corporate', '5CC5D4': 'Intel Corporate', '001E64': 'Intel Corporate', '3CFDFE': 'Intel Corporate', '000874': 'Dell Inc.', '1C4419': 'TP-LINK TECHNOLOGIES CO.,LTD.', '5C353B': 'Compal Broadband Networks, Inc.', '247C4C': 'Herman Miller', 'E46F13': 'D-Link International', '00604C': 'Sagemcom Broadband SAS', '001F95': 'Sagemcom Broadband SAS', '002348': 'Sagemcom Broadband SAS', '002691': 'Sagemcom Broadband SAS', '988B5D': 'Sagemcom Broadband SAS', 'B8E625': '2Wire Inc', 'B083FE': 'Dell Inc.', '3417EB': 'Dell Inc.', 'F8BC12': 'Dell Inc.', '18A99B': 'Dell Inc.', '00253C': '2Wire Inc', '34EF44': '2Wire Inc', 'B0E754': '2Wire Inc', '001143': 'Dell Inc.', '4C7625': 'Dell Inc.', '44A842': 'Dell Inc.', 'F01FAF': 'Dell Inc.', '00188B': 'Dell Inc.', '001372': 'Dell Inc.', '001D5A': '2Wire Inc', 'ECDF3A': 'vivo Mobile Communication Co., Ltd.', 'F42981': 'vivo Mobile Communication Co., Ltd.', '84F6FA': 'Miovision Technologies Incorporated', '70106F': 'Hewlett Packard Enterprise', 'F8E71E': 'Ruckus Wireless', '2C56DC': 'ASUSTek COMPUTER INC.', '003146': 'Juniper Networks', '90013B': 'Sagemcom Broadband SAS', '7C034C': 'Sagemcom Broadband SAS', '6C2E85': 'Sagemcom Broadband SAS', '94FEF4': 'Sagemcom Broadband SAS', '28FAA0': 'vivo Mobile Communication Co., Ltd.', '08863B': 'Belkin International Inc.', '00F871': 'DGS Denmark A/S', 'F4FC32': 'Texas Instruments', '001833': 'Texas Instruments', '001834': 'Texas Instruments', '0017E3': 'Texas Instruments', '001830': 'Texas Instruments', '0023D4': 'Texas Instruments', 'C0E422': 'Texas Instruments', 'D00790': 'Texas Instruments', '3C7DB1': 'Texas Instruments', '0017E8': 'Texas Instruments', '001783': 'Texas Instruments', '34B1F7': 'Texas Instruments', '90D7EB': 'Texas Instruments', '78DEE4': 'Texas 
Instruments', '2CFD37': 'Blue Calypso, Inc.', '0C6127': 'Actiontec Electronics, Inc', '2435CC': 'Zhongshan Scinan Internet of Things Co.,Ltd.', '2C3033': 'NETGEAR', '3CD92B': 'Hewlett Packard', 'BCF685': 'D-Link International', '78542E': 'D-Link International', 'C4A81D': 'D-Link International', '002191': 'D-Link Corporation', 'ACF1DF': 'D-Link International', 'CC46D6': 'Cisco Systems, Inc', '0041D2': 'Cisco Systems, Inc', '2CAB00': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A8CA7B': 'HUAWEI TECHNOLOGIES CO.,LTD', 'FCFFAA': 'IEEE Registration Authority', '3898D8': 'MERITECH CO.,LTD', '086698': 'Apple, Inc.', 'BC5436': 'Apple, Inc.', '044BED': 'Apple, Inc.', '6C8DC1': 'Apple, Inc.', 'BC4434': 'Shenzhen TINNO Mobile Technology Corp.', '04BF6D': 'Zyxel Communications Corporation', '84ACFB': 'Crouzet Automatismes', '7CBB8A': 'Nintendo Co., Ltd.', '0CD746': 'Apple, Inc.', '60A37D': 'Apple, Inc.', '68DBCA': 'Apple, Inc.', 'F88FCA': 'Google, Inc.', '88B8D0': 'Dongguan Koppo Electronic Co.,Ltd', '601971': 'ARRIS Group, Inc.', '4419B6': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', 'F09CE9': 'Aerohive Networks Inc.', '9C5D12': 'Aerohive Networks Inc.', 'C413E2': 'Aerohive Networks Inc.', 'C8675E': 'Aerohive Networks Inc.', '9486CD': 'SEOUL ELECTRONICS&TELECOM', '3897D6': 'Hangzhou H3C Technologies Co., Limited', '58AC78': 'Cisco Systems, Inc', '541E56': 'Juniper Networks', '1CA770': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', 'F8CAB8': 'Dell Inc.', '5C571A': 'ARRIS Group, Inc.', 'E8892C': 'ARRIS Group, Inc.', '94877C': 'ARRIS Group, Inc.', '407009': 'ARRIS Group, Inc.', '083E0C': 'ARRIS Group, Inc.', '8896B6': 'Global Fire Equipment S.A.', '88A25E': 'Juniper Networks', '207355': 'ARRIS Group, Inc.', 'F8EDA5': 'ARRIS Group, Inc.', '5465DE': 'ARRIS Group, Inc.', '6CCA08': 'ARRIS Group, Inc.', '78719C': 'ARRIS Group, Inc.', 'D40598': 'ARRIS Group, Inc.', '000D0B': 'BUFFALO.INC', '001D73': 'BUFFALO.INC', '001601': 'BUFFALO.INC', '7403BD': 'BUFFALO.INC', '3C36E4': 'ARRIS Group, Inc.', '1C1B68': 'ARRIS Group, Inc.', '000423': 'Intel Corporation', '001111': 'Intel Corporation', '001302': 'Intel Corporate', 'B8FC9A': 'Le Shi Zhi Xin Electronic Technology (Tianjin) Limited', '780AC7': 'Baofeng TV Co., Ltd.', '0018FE': 'Hewlett Packard', '001A4B': 'Hewlett Packard', '002481': 'Hewlett Packard', 'E83381': 'ARRIS Group, Inc.', '8C7F3B': 'ARRIS Group, Inc.', 'D40B1A': 'HTC Corporation', 'A45D36': 'Hewlett Packard', 'F0921C': 'Hewlett Packard', 'A0481C': 'Hewlett Packard', 'A01D48': 'Hewlett Packard', '40A8F0': 'Hewlett Packard', '8851FB': 'Hewlett Packard', 'C44044': 'RackTop Systems Inc.', '4CA161': 'Rain Bird Corporation', '00805F': 'Hewlett Packard', '288023': 'Hewlett Packard', '082E5F': 'Hewlett Packard', 'E4115B': 'Hewlett Packard', '28924A': 'Hewlett Packard', '000F61': 'Hewlett Packard', '0014C2': 'Hewlett Packard', 'CC3E5F': 'Hewlett Packard', 'D89D67': 'Hewlett Packard', '480FCF': 'Hewlett Packard', '9060F1': 'Apple, Inc.', '8CAB8E': 'Shanghai Feixun Communication Co.,Ltd.', 'A4516F': 'Microsoft Mobile Oy', 'FC64BA': 'Xiaomi Communications Co Ltd', '4CD08A': 'HUMAX Co., Ltd.', 'CC4EEC': 'HUMAX Co., Ltd.', '403DEC': 'HUMAX Co., Ltd.', 'EC4D47': 'HUAWEI TECHNOLOGIES CO.,LTD', '945330': 'Hon Hai Precision Ind. Co.,Ltd.', 'A08D16': 'HUAWEI TECHNOLOGIES CO.,LTD', '00242B': 'Hon Hai Precision Ind. 
Co.,Ltd.', 'EC26CA': 'TP-LINK TECHNOLOGIES CO.,LTD.', '9471AC': 'TCT mobile ltd', '940C6D': 'TP-LINK TECHNOLOGIES CO.,LTD.', '647002': 'TP-LINK TECHNOLOGIES CO.,LTD.', '10FEED': 'TP-LINK TECHNOLOGIES CO.,LTD.', '645601': 'TP-LINK TECHNOLOGIES CO.,LTD.', '542758': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', 'F8D111': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'B0487A': 'TP-LINK TECHNOLOGIES CO.,LTD.', '001E73': 'zte corporation', '0015EB': 'zte corporation', '001C25': 'Hon Hai Precision Ind. Co.,Ltd.', '00197E': 'Hon Hai Precision Ind. Co.,Ltd.', '2C088C': 'HUMAX Co., Ltd.', '1C994C': 'Murata Manufacturing Co., Ltd.', 'F02765': 'Murata Manufacturing Co., Ltd.', '5CF8A1': 'Murata Manufacturing Co., Ltd.', '90FBA6': 'Hon Hai Precision Ind. Co.,Ltd.', '4437E6': 'Hon Hai Precision Ind. Co.,Ltd.', 'CCAF78': 'Hon Hai Precision Ind. Co.,Ltd.', 'F4B7E2': 'Hon Hai Precision Ind. Co.,Ltd.', '785968': 'Hon Hai Precision Ind. Co.,Ltd.', '000480': 'Brocade Communications Systems, Inc.', '44A7CF': 'Murata Manufacturing Co., Ltd.', '0013E0': 'Murata Manufacturing Co., Ltd.', '000CDB': 'Brocade Communications Systems, Inc.', '001BED': 'Brocade Communications Systems, Inc.', '000533': 'Brocade Communications Systems, Inc.', '006069': 'Brocade Communications Systems, Inc.', '0060DF': 'Brocade Communications Systems, Inc.', '000088': 'Brocade Communications Systems, Inc.', 'ECCB30': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4DCF9': 'HUAWEI TECHNOLOGIES CO.,LTD', '00664B': 'HUAWEI TECHNOLOGIES CO.,LTD', '08181A': 'zte corporation', 'A49947': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C8D15E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4559C': 'HUAWEI TECHNOLOGIES CO.,LTD', '80B686': 'HUAWEI TECHNOLOGIES CO.,LTD', '10C61F': 'HUAWEI TECHNOLOGIES CO.,LTD', '9CC172': 'HUAWEI TECHNOLOGIES CO.,LTD', '247F3C': 'HUAWEI TECHNOLOGIES CO.,LTD', '581F28': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C07009': 'HUAWEI TECHNOLOGIES CO.,LTD', '8038BC': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C4072F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F48E92': 'HUAWEI TECHNOLOGIES CO.,LTD', '241FA0': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CC96A0': 'HUAWEI TECHNOLOGIES CO.,LTD', '308730': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C057BC': 'Avaya Inc', '64A7DD': 'Avaya Inc', 'A4251B': 'Avaya Inc', '44322A': 'Avaya Inc', '7038EE': 'Avaya Inc', '703018': 'Avaya Inc', 'F80113': 'HUAWEI TECHNOLOGIES CO.,LTD', '646A52': 'Avaya Inc', '080028': 'Texas Instruments', '405FC2': 'Texas Instruments', '20CD39': 'Texas Instruments', 'DCD2FC': 'HUAWEI TECHNOLOGIES CO.,LTD', '9017AC': 'HUAWEI TECHNOLOGIES CO.,LTD', '18C58A': 'HUAWEI TECHNOLOGIES CO.,LTD', '34CDBE': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D8490B': 'HUAWEI TECHNOLOGIES CO.,LTD', '68DFDD': 'Xiaomi Communications Co Ltd', '98FAE3': 'Xiaomi Communications Co Ltd', '9C28EF': 'HUAWEI TECHNOLOGIES CO.,LTD', '7054F5': 'HUAWEI TECHNOLOGIES CO.,LTD', 'EC24B8': 'Texas Instruments', '7CEC79': 'Texas Instruments', '689E19': 'Texas Instruments', 'E0E5CF': 'Texas Instruments', '00107B': 'Cisco Systems, Inc', '0050E2': 'Cisco Systems, Inc', 'F0B429': 'Xiaomi Communications Co Ltd', 'B4994C': 'Texas Instruments', '00173B': 'Cisco Systems, Inc', '006083': 'Cisco Systems, Inc', '0090A6': 'Cisco Systems, Inc', '009086': 'Cisco Systems, Inc', '005080': 'Cisco Systems, Inc', '005073': 'Cisco Systems, Inc', 'F872EA': 'Cisco Systems, Inc', 'D0C789': 'Cisco Systems, Inc', 'F84F57': 'Cisco Systems, Inc', '7C69F6': 'Cisco Systems, Inc', 'F02929': 'Cisco Systems, Inc', '20BBC0': 'Cisco Systems, Inc', '4C4E35': 'Cisco Systems, Inc', 'BC1665': 'Cisco Systems, Inc', '001BD7': 'Cisco SPVTG', 
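# Usage sketch (comment-only annotation; executable code cannot sit inside
# this dict literal): each key is an IEEE OUI -- the first three octets of a
# MAC address as six upper-case hex digits, no separators -- and each value is
# the vendor name registered for that prefix. Assuming the dict defined above
# is bound to a name such as OUI_VENDORS (hypothetical; the real name is
# declared before this table), a minimal lookup helper could be:
#
#     def lookup_vendor(mac: str, table: dict = OUI_VENDORS) -> str:
#         """Normalize a MAC like 'f0:27:65:aa:bb:cc' and return its vendor."""
#         prefix = mac.upper().replace(':', '').replace('-', '')[:6]
#         return table.get(prefix, 'Unknown vendor')
#
#     # e.g. lookup_vendor('f0:27:65:aa:bb:cc')
#     #      -> 'Murata Manufacturing Co., Ltd.'  (from the 'F02765' entry)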
'E4D3F1': 'Cisco Systems, Inc', '006009': 'Cisco Systems, Inc', '00067C': 'Cisco Systems, Inc', '00E0F7': 'Cisco Systems, Inc', '00900C': 'Cisco Systems, Inc', '00905F': 'Cisco Systems, Inc', '00100B': 'Cisco Systems, Inc', '8478AC': 'Cisco Systems, Inc', '04DAD2': 'Cisco Systems, Inc', '78DA6E': 'Cisco Systems, Inc', '1C872C': 'ASUSTek COMPUTER INC.', '60182E': 'ShenZhen Protruly Electronic Ltd co.', 'C4143C': 'Cisco Systems, Inc', '3C08F6': 'Cisco Systems, Inc', '501CBF': 'Cisco Systems, Inc', 'B000B4': 'Cisco Systems, Inc', 'F45FD4': 'Cisco SPVTG', '2CABA4': 'Cisco SPVTG', '1C6E4C': 'Logistic Service & Engineering Co.,Ltd', '0013D4': 'ASUSTek COMPUTER INC.', '20CF30': 'ASUSTek COMPUTER INC.', '00248C': 'ASUSTek COMPUTER INC.', '002354': 'ASUSTek COMPUTER INC.', '78BAF9': 'Cisco Systems, Inc', '0022CE': 'Cisco SPVTG', '000F66': 'Cisco-Linksys, LLC', 'E0899D': 'Cisco Systems, Inc', 'C47295': 'Cisco Systems, Inc', '544A00': 'Cisco Systems, Inc', '00E16D': 'Cisco Systems, Inc', 'E0D173': 'Cisco Systems, Inc', '24374C': 'Cisco SPVTG', '001E8C': 'ASUSTek COMPUTER INC.', 'B8782E': 'Apple, Inc.', '000502': 'Apple, Inc.', 'E4AA5D': 'Cisco Systems, Inc', 'E80688': 'Apple, Inc.', '7CC537': 'Apple, Inc.', '78CA39': 'Apple, Inc.', '18E7F4': 'Apple, Inc.', 'DCEB94': 'Cisco Systems, Inc', '84B517': 'Cisco Systems, Inc', '188B9D': 'Cisco Systems, Inc', '00264A': 'Apple, Inc.', '041E64': 'Apple, Inc.', '90840D': 'Apple, Inc.', '70CD60': 'Apple, Inc.', '8C7B9D': 'Apple, Inc.', '000A95': 'Apple, Inc.', '001124': 'Apple, Inc.', '34BDC8': 'Cisco Systems, Inc', '002241': 'Apple, Inc.', 'B4F0AB': 'Apple, Inc.', '80929F': 'Apple, Inc.', '9C04EB': 'Apple, Inc.', '5C969D': 'Apple, Inc.', 'A8968A': 'Apple, Inc.', '9803D8': 'Apple, Inc.', 'D89E3F': 'Apple, Inc.', 'B8C75D': 'Apple, Inc.', '5C95AE': 'Apple, Inc.', '842999': 'Apple, Inc.', '74E2F5': 'Apple, Inc.', 'E0C97A': 'Apple, Inc.', '444C0C': 'Apple, Inc.', 'F41BA1': 'Apple, Inc.', '041552': 'Apple, Inc.', 'EC852F': 'Apple, Inc.', '00F4B9': 'Apple, Inc.', '60C547': 'Apple, Inc.', '68A86D': 'Apple, Inc.', '0C74C2': 'Apple, Inc.', '403004': 'Apple, Inc.', '7CC3A1': 'Apple, Inc.', '7073CB': 'Apple, Inc.', '689C70': 'Apple, Inc.', '380F4A': 'Apple, Inc.', '3010E4': 'Apple, Inc.', 'A886DD': 'Apple, Inc.', 'F0C1F1': 'Apple, Inc.', '60D9C7': 'Apple, Inc.', '3CAB8E': 'Apple, Inc.', '609217': 'Apple, Inc.', '84B153': 'Apple, Inc.', 'E06678': 'Apple, Inc.', '00F76F': 'Apple, Inc.', 'C88550': 'Apple, Inc.', 'E0B52D': 'Apple, Inc.', '6C94F8': 'Apple, Inc.', 'CC785F': 'Apple, Inc.', '88CB87': 'Apple, Inc.', '685B35': 'Apple, Inc.', '2CB43A': 'Apple, Inc.', '907240': 'Apple, Inc.', 'F82793': 'Apple, Inc.', '908D6C': 'Apple, Inc.', 'B8098A': 'Apple, Inc.', '48D705': 'Apple, Inc.', '68D93C': 'Apple, Inc.', '4C7C5F': 'Apple, Inc.', '68644B': 'Apple, Inc.', '843835': 'Apple, Inc.', '8C006D': 'Apple, Inc.', 'C81EE7': 'Apple, Inc.', 'A43135': 'Apple, Inc.', '7014A6': 'Apple, Inc.', '985AEB': 'Apple, Inc.', '78D75F': 'Apple, Inc.', 'C0CECD': 'Apple, Inc.', 'F44B2A': 'Cisco SPVTG', '5CE3B6': 'Fiberhome Telecommunication Technologies Co.,LTD', '90C99B': 'Tesorion Nederland B.V.', '5CADCF': 'Apple, Inc.', 'BC6C21': 'Apple, Inc.', 'C4ADF1': 'GOPEACE Inc.', '58FC73': 'Arria Live Media, Inc.', '0C1A10': 'Acoustic Stream', 'C4EF70': 'Home Skinovations', '746F19': 'ICARVISIONS (SHENZHEN) TECHNOLOGY CO., LTD.', '7C5A67': 'JNC Systems, Inc.', '2CAE2B': 'Samsung Electronics Co.,Ltd', 'A0F9E0': 'VIVATEL COMPANY LIMITED', 'F8C372': 'TSUZUKI DENKI', 'D47208': 'Bragi GmbH', 'C869CD': 'Apple, 
Inc.', 'A4B805': 'Apple, Inc.', 'E81363': 'Comstock RD, Inc.', '741865': 'Shanghai DareGlobal Technologies Co.,Ltd', '3C7A8A': 'ARRIS Group, Inc.', 'F40E22': 'Samsung Electronics Co.,Ltd', 'C01173': 'Samsung Electronics Co.,Ltd', '7853F2': 'ROXTON Ltd.', 'BCE63F': 'Samsung Electronics Co.,Ltd', '7C9122': 'Samsung Electronics Co.,Ltd', '6CEBB2': 'Dongguan Sen DongLv Electronics Co.,Ltd', 'A87285': 'IDT, INC.', 'B49D0B': 'BQ', '3C8CF8': 'TRENDnet, Inc.', '080A4E': 'Planet Bingo® — 3rd Rock Gaming®', '044169': 'GoPro', 'C02DEE': 'Cuff', '780541': 'Queclink Wireless Solutions Co., Ltd', 'F898B9': 'HUAWEI TECHNOLOGIES CO.,LTD', '5CB559': 'CNEX Labs', 'B83A9D': 'Alarm.com', '9023EC': 'Availink, Inc.', '441CA8': 'Hon Hai Precision Ind. Co.,Ltd.', '881B99': 'SHENZHEN XIN FEI JIA ELECTRONIC CO. LTD.', 'ACBC32': 'Apple, Inc.', '544E90': 'Apple, Inc.', 'A4A6A9': 'Private', '8C10D4': 'Sagemcom Broadband SAS', '6858C5': 'ZF TRW Automotive', 'F4E926': 'Tianjin Zanpu Technology Inc.', '906F18': 'Private', '98CB27': 'Galore Networks Pvt. Ltd.', 'CC794A': 'BLU Products Inc.', '94D859': 'TCT mobile ltd', '2CFCE4': 'CTEK Sweden AB', 'B4293D': 'Shenzhen Urovo Technology Co.,Ltd.', '54FF82': 'Davit Solution co.', '50DF95': 'Lytx', '2827BF': 'Samsung Electronics Co.,Ltd', 'E855B4': 'SAI Technology Inc.', '340CED': 'Moduel AB', '9CA69D': 'Whaley Technology Co.Ltd', '5853C0': 'Beijing Guang Runtong Technology Development Company co.,Ltd', '245BF0': 'Liteon, Inc.', '2CA539': 'Parallel Wireless, Inc', '247260': 'IOTTECH Corp', 'E8F2E2': 'LG Innotek', '20635F': 'Abeeway', '083A5C': 'Junilab, Inc.', '300C23': 'zte corporation', 'C0EE40': 'Laird Technologies', '188EF9': 'G2C Co. Ltd.', 'C47D46': 'FUJITSU LIMITED', '1005B1': 'ARRIS Group, Inc.', 'C0B713': 'Beijing Xiaoyuer Technology Co. Ltd.', 'F4B8A7': 'zte corporation', '6CE01E': 'Modcam AB', '74852A': 'PEGATRON CORPORATION', 'B8B3DC': 'DEREK (SHAOGUAN) LIMITED', '702A7D': 'EpSpot AB', '4CAE31': 'ShengHai Electronics (Shenzhen) Ltd', 'F4E9D4': 'QLogic Corporation', '4CEEB0': 'SHC Netzwerktechnik GmbH', '44F436': 'zte corporation', '800184': 'HTC Corporation', '185D9A': 'BobjGear LLC', '4CB76D': 'Novi Security', '609C9F': 'Brocade Communications Systems, Inc.', 'A8827F': 'CIBN Oriental Network(Beijing) CO.,Ltd', 'B8C3BF': 'Henan Chengshi NetWork Technology Co.,Ltd', '1CB72C': 'ASUSTek COMPUTER INC.', '40B837': 'Sony Mobile Communications Inc', '44C69B': 'Wuhan Feng Tian Information Network CO.,LTD', 'D048F3': 'DATTUS Inc', 'C02567': 'Nexxt Solutions', 'FCE33C': 'HUAWEI TECHNOLOGIES CO.,LTD', '9CB6D0': 'Rivet Networks', '40B89A': 'Hon Hai Precision Ind. Co.,Ltd.', '44962B': 'Aidon Oy', '84F129': 'Metrascale Inc.', 'B89ACD': 'ELITE OPTOELECTRONIC(ASIA)CO.,LTD', 'D468BA': 'Shenzhen Sundray Technologies Company Limited', '086266': 'ASUSTek COMPUTER INC.', '9C3066': 'RWE Effizienz GmbH', 'A45602': 'fenglian Technology Co.,Ltd.', 'C8C50E': 'Shenzhen Primestone Network Technologies.Co., Ltd.', 'D06A1F': 'BSE CO.,LTD.', 'E807BF': 'SHENZHEN BOOMTECH INDUSTRY CO.,LTD', 'B008BF': 'Vital Connect, Inc.', 'D4522A': 'TangoWiFi.com', 'E076D0': 'AMPAK Technology, Inc.', '700136': 'FATEK Automation Corporation', 'FCA22A': 'PT. Callysta Multi Engineering', '18BDAD': 'L-TECH CORPORATION', '60E6BC': 'Sino-Telecom Technology Co.,Ltd.', 'F42C56': 'SENOR TECH CO LTD', 'FCDC4A': 'G-Wearables Corp.', '1C14B3': 'Airwire Technologies', 'F01E34': 'ORICO Technologies Co., Ltd', '94E2FD': 'Boge Kompressoren OTTO Boge GmbH & Co. 
KG', '6CF5E8': 'Mooredoll Inc.', 'A89008': 'Beijing Yuecheng Technology Co. Ltd.', 'DCE026': 'Patrol Tag, Inc', 'B40566': 'SP Best Corporation Co., LTD.', '1CC72D': 'Shenzhen Huapu Digital CO.,Ltd', '8CBFA6': 'Samsung Electronics Co.,Ltd', 'C8A823': 'Samsung Electronics Co.,Ltd', 'D85DE2': 'Hon Hai Precision Ind. Co.,Ltd.', '3C912B': 'Vexata Inc', '346C0F': 'Pramod Telecom Pvt. Ltd', '183864': 'CAP-TECH INTERNATIONAL CO., LTD.', 'C0335E': 'Microsoft', 'B0E03C': 'TCT mobile ltd', 'BC1485': 'Samsung Electronics Co.,Ltd', '9C6C15': 'Microsoft Corporation', 'B0C559': 'Samsung Electronics Co.,Ltd', 'A48CDB': 'Lenovo', 'D0929E': 'Microsoft Corporation', '84CFBF': 'Fairphone', 'ACD1B8': 'Hon Hai Precision Ind. Co.,Ltd.', 'E8447E': 'Bitdefender SRL', 'B0495F': 'OMRON HEALTHCARE Co., Ltd.', '4CA928': 'Insensi', '445ECD': 'Razer Inc', 'DC0914': 'Talk-A-Phone Co.', '00A509': 'WigWag Inc.', '7491BD': 'Four systems Co.,Ltd.', '742EFC': 'DirectPacket Research, Inc,', '60F189': 'Murata Manufacturing Co., Ltd.', 'D43266': 'Fike Corporation', '900CB4': 'Alinket Electronic Technology Co., Ltd', '48C093': 'Xirrus, Inc.', 'A0C2DE': 'Costar Video Systems', '88E161': 'Art Beijing Science and Technology Development Co., Ltd.', 'F0FE6B': 'Shanghai High-Flying Electronics Technology Co., Ltd', '3CAE69': 'ESA Elektroschaltanlagen Grimma GmbH', '00F3DB': 'WOO Sports', 'BC52B4': 'Nokia', '9405B6': 'Liling FullRiver Electronics & Technology Ltd', '78312B': 'zte corporation', '00A2F5': 'Guangzhou Yuanyun Network Technology Co.,Ltd', '10FACE': 'Reacheng Communication Technology Co.,Ltd', '3438AF': 'Inlab Software GmbH', 'B4A828': 'Shenzhen Concox Information Technology Co., Ltd', 'C81B6B': 'Innova Security', '44CE7D': 'SFR', '344DEA': 'zte corporation', '94BF95': 'Shenzhen Coship Electronics Co., Ltd', 'E42354': 'SHENZHEN FUZHI SOFTWARE TECHNOLOGY CO.,LTD', '9470D2': 'WINFIRM TECHNOLOGY', 'A44AD3': 'ST Electronics(Shanghai) Co.,Ltd', '1008B1': 'Hon Hai Precision Ind. Co.,Ltd.', 'E48C0F': 'Discovery Insure', '4C16F1': 'zte corporation', 'D8FB11': 'AXACORE', '30FAB7': 'Tunai Creative', '0809B6': 'Masimo Corp', '4CF5A0': 'Scalable Network Technologies Inc', 'C4BD6A': 'SKF GmbH', 'C401CE': 'PRESITION (2000) CO., LTD.', '587BE9': 'AirPro Technology India Pvt. Ltd', 'CC3080': 'VAIO Corporation', '187117': 'eta plus electronic gmbh', '7CB177': 'Satelco AG', 'EC0EC4': 'Hon Hai Precision Ind. Co.,Ltd.', '8C18D9': 'Shenzhen RF Technology Co., Ltd', '70F196': 'Actiontec Electronics, Inc', '188219': 'Alibaba Cloud Computing Ltd.', '4CE933': 'RailComm, LLC', 'CCE17F': 'Juniper Networks', 'B4B859': 'Texa Spa', '5CF9F0': 'Atomos Engineering P/L', 'D0A0D6': 'Chengdu TD Tech Ltd.', 'ECB907': 'CloudGenix Inc', 'EC1D7F': 'zte corporation', 'AC3870': 'Lenovo Mobile Communication Technology Ltd.', '600292': 'PEGATRON CORPORATION', '3C46D8': 'TP-LINK TECHNOLOGIES CO.,LTD.', '147590': 'TP-LINK TECHNOLOGIES CO.,LTD.', '50BD5F': 'TP-LINK TECHNOLOGIES CO.,LTD.', '28A5EE': 'Shenzhen SDGI CATV Co., Ltd', 'E4C62B': 'Airware', '4CBC42': 'Shenzhen Hangsheng Electronics Co.,Ltd.', '987E46': 'Emizon Networks Limited', 'F4D032': 'Yunnan Ideal Information&Technology.,Ltd', '0C8C8F': 'Kamo Technology Limited', 'A4A4D3': 'Bluebank Communication Technology Co.Ltd', '702DD1': 'Newings Communication CO., LTD.', 'F4F646': 'Dediprog Technology Co. 
Ltd.', 'F42833': 'MMPC Inc.', '4C83DE': 'Cisco SPVTG', 'A81374': 'Panasonic Corporation AVC Networks Company', '28E6E9': 'SIS Sat Internet Services GmbH', 'F4FD2B': 'ZOYI Company', '3C189F': 'Nokia Corporation', 'A8329A': 'Digicom Futuristic Technologies Ltd.', '083D88': 'Samsung Electronics Co.,Ltd', 'BC4E5D': 'ZhongMiao Technology Co., Ltd.', '7C6AC3': 'GatesAir, Inc', '7CC4EF': 'Devialet', 'D85DFB': 'Private', '109266': 'Samsung Electronics Co.,Ltd', '045C8E': 'gosund GROUP CO.,LTD', '08CD9B': 'samtec automotive electronics & software GmbH', '30595B': 'streamnow AG', '84850A': 'Hella Sonnen- und Wetterschutztechnik GmbH', '300D2A': 'Zhejiang Wellcom Technology Co.,Ltd.', '5C5BC2': 'YIK Corporation', '908C63': 'GZ Weedong Networks Technology Co. , Ltd', 'DC38E1': 'Juniper Networks', 'E8EF89': 'OPMEX Tech.', 'DCF110': 'Nokia Corporation', '608F5C': 'Samsung Electronics Co.,Ltd', 'F84A73': 'EUMTECH CO., LTD', '142BD6': 'Guangdong Appscomm Co.,Ltd', 'D46761': 'United Gulf Gate Co.', 'A47E39': 'zte corporation', 'FCC2DE': 'Murata Manufacturing Co., Ltd.', '40167E': 'ASUSTek COMPUTER INC.', '983713': 'PT.Navicom Indonesia', '3481C4': 'AVM GmbH', 'CCB691': 'NECMagnusCommunications', 'D0C7C0': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'EC2E4E': 'HITACHI-LG DATA STORAGE INC', '98349D': 'Krauss Maffei Technologies GmbH', '880FB6': 'Jabil Circuits India Pvt Ltd,-EHTP unit', 'B46698': 'Zealabs srl', '687CC8': 'Measurement Systems S. de R.L.', '74F85D': 'Berkeley Nucleonics Corp', '4C7F62': 'Nokia Corporation', '580528': 'LABRIS NETWORKS', 'D881CE': 'AHN INC.', 'E0D31A': 'EQUES Technology Co., Limited', '0092FA': 'SHENZHEN WISKY TECHNOLOGY CO.,LTD', '30C750': 'MIC Technology Group', '4411C2': 'Telegartner Karl Gartner GmbH', '8059FD': 'Noviga', '100F18': 'Fu Gang Electronic(KunShan)CO.,LTD', '18CC23': 'Philio Technology Corporation', 'B061C7': 'Ericsson-LG Enterprise', '400107': 'Arista Corp', '407875': 'IMBEL - Industria de Material Belico do Brasil', '5CE7BF': 'New Singularity International Technical Development Co.,Ltd', '386C9B': 'Ivy Biomedical', 'B42C92': 'Zhejiang Weirong Electronic Co., Ltd', '28C825': 'DellKing Industrial Co., Ltd', '80618F': 'Shenzhen sangfei consumer communications co.,ltd', '447E76': 'Trek Technology (S) Pte Ltd', 'B0EC8F': 'GMX SAS', '987770': 'Pep Digital Technology (Guangzhou) Co., Ltd', '28DEF6': 'bioMerieux Inc.', 'D82A15': 'Leitner SpA', '50B695': 'Micropoint Biotechnologies,Inc.', 'B4430D': 'Broadlink Pty Ltd', '50A054': 'Actineon', 'B48547': 'Amptown System Company GmbH', '68D247': 'Portalis LC', 'C064C6': 'Nokia Corporation', '14F28E': 'ShenYang ZhongKe-Allwin Technology Co.LTD', 'BC14EF': 'ITON Technology Limited', '5056A8': 'Jolla Ltd', 'A06518': 'VNPT TECHNOLOGY', '7C8D91': 'Shanghai Hongzhuo Information Technology co.,LTD', '10DDF4': 'Maxway Electronics CO.,LTD', '080371': 'KRG CORPORATE', '200E95': 'IEC – TC9 WG43', '748F1B': 'MasterImage 3D', '083F76': 'Intellian Technologies, Inc.', 'A07771': 'Vialis BV', 'C8F68D': 'S.E.TECHNOLOGIES LIMITED', '387B47': 'AKELA, Inc.', '6C641A': 'Penguin Computing', 'CC89FD': 'Nokia Corporation', '34466F': 'HiTEM Engineering', 'D87CDD': 'SANIX INCORPORATED', '707C18': 'ADATA Technology Co., Ltd', '78EC74': 'Kyland-USA', 'C0F991': 'GME Standard Communications P/L', 'E8E770': 'Warp9 Tech Design, Inc.', '609620': 'Private', '98DA92': 'Vuzix Corporation', 'E40439': 'TomTom Software Ltd', '0C1262': 'zte corporation', '3CD4D6': 'WirelessWERX, Inc', '443C9C': 'Pintsch Tiefenbach GmbH', 'D0C42F': 'Tamagawa Seiki Co.,Ltd.', '549359': 
'SHENZHEN TWOWING TECHNOLOGIES CO.,LTD.', '90356E': 'Vodafone Omnitel N.V.', '284430': 'GenesisTechnical Systems (UK) Ltd', '844F03': 'Ablelink Electronics Ltd', '783D5B': 'TELNET Redes Inteligentes S.A.', 'D0B523': 'Bestcare Cloucal Corp.', '24A495': 'Thales Canada Inc.', 'E097F2': 'Atomax Inc.', '70305E': 'Nanjing Zhongke Menglian Information Technology Co.,LTD', 'C098E5': 'University of Michigan', '50E14A': 'Private', '5C1193': 'Seal One AG', '847616': 'Addat s.r.o.', 'DC0575': 'SIEMENS ENERGY AUTOMATION', '701D7F': 'Comtech Technology Co., Ltd.', '9C039E': 'Beijing Winchannel Software Technology Co., Ltd', '3C1040': 'daesung network', '28FC51': 'The Electric Controller and Manufacturing Co., LLC', '407496': 'aFUN TECHNOLOGY INC.', '708D09': 'Nokia Corporation', '98FB12': 'Grand Electronics (HK) Ltd', '705986': 'OOO TTV', '680AD7': 'Yancheng Kecheng Optoelectronic Technology Co., Ltd', 'BC8893': 'VILLBAU Ltd.', '986CF5': 'zte corporation', '906717': 'Alphion India Private Limited', '6064A1': 'RADiflow Ltd.', '9CF8DB': 'shenzhen eyunmei technology co,.ltd', '447BC4': 'DualShine Technology(SZ)Co.,Ltd', '58B961': 'SOLEM Electronique', '78491D': 'The Will-Burt Company', 'F46ABC': 'Adonit Corp. Ltd.', '0C473D': 'Hitron Technologies. Inc', '8CCDA2': 'ACTP, Inc.', '84262B': 'Nokia', '20D21F': 'Wincal Technology Corp.', 'F89550': 'Proton Products Chengdu Ltd', '706173': 'Calantec GmbH', '7C49B9': 'Plexus Manufacturing Sdn Bhd', '58639A': 'TPL SYSTEMES', '187ED5': 'shenzhen kaism technology Co. Ltd', '841B38': 'Shenzhen Excelsecu Data Technology Co.,Ltd', '4CCBF5': 'zte corporation', '44700B': 'IFFU', '54A54B': 'NSC Communications Siberia Ltd', 'BC2B6B': 'Beijing Haier IC Design Co.,Ltd', '98D331': 'Shenzhen Bolutek Technology Co.,Ltd.', '38EC11': 'Novatek Microelectronics Corp.', '1C7B21': 'Sony Mobile Communications Inc', '6CF97C': 'Nanoptix Inc.', 'F8FF5F': 'Shenzhen Communication Technology Co.,Ltd', 'E0AF4B': 'Pluribus Networks, Inc.', '840F45': 'Shanghai GMT Digital Technologies Co., Ltd', '2C5FF3': 'Pertronic Industries', '1C4158': 'Gemalto M2M GmbH', '9C2840': 'Discovery Technology,LTD..', '28C671': 'Yota Devices OY', 'D86960': 'Steinsvik', '08EF3B': 'MCS Logic Inc.', 'E8EADA': 'Denkovi Assembly Electronics LTD', 'F85BC9': 'M-Cube Spa', '04848A': '7INOVA TECHNOLOGY LIMITED', '7CB77B': 'Paradigm Electronics Inc', 'B0CE18': 'Zhejiang shenghui lighting co.,Ltd', '102279': 'ZeroDesktop, Inc.', '7C1AFC': 'Dalian Co-Edifice Video Technology Co., Ltd', '907A0A': 'Gebr. 
Bode GmbH & Co KG', 'A0C6EC': 'ShenZhen ANYK Technology Co.,LTD', '78E8B6': 'zte corporation', 'F08EDB': 'VeloCloud Networks', 'F47A4E': 'Woojeon&Handan', 'EC2257': 'JiangSu NanJing University Electronic Information Technology Co.,Ltd', 'F037A1': 'Huike Electronics (SHENZHEN) CO., LTD.', 'C0A39E': 'EarthCam, Inc.', '704CED': 'TMRG, Inc.', '109AB9': 'Tosibox Oy', '142D8B': 'Incipio Technologies, Inc', '68EE96': 'Cisco SPVTG', '78D38D': 'HONGKONG YUNLINK TECHNOLOGY LIMITED', 'DCAE04': 'CELOXICA Ltd', '8005DF': 'Montage Technology Group Limited', '1078CE': 'Hanvit SI, Inc.', 'D8DA52': 'APATOR S.A.', '587A4D': 'Stonesoft Corporation', '84E629': 'Bluwan SA', '681D64': 'Sunwave Communications Co., Ltd', '4C21D0': 'Sony Mobile Communications Inc', 'C47F51': 'Inventek Systems', 'A897DC': 'IBM', 'E8481F': 'Advanced Automotive Antennas', 'A09BBD': 'Total Aviation Solutions Pty Ltd', 'D40BB9': 'Solid Semecs bv.', 'F415FD': 'Shanghai Pateo Electronic Equipment Manufacturing Co., Ltd.', 'CCD29B': 'Shenzhen Bopengfa Elec&Technology CO.,Ltd', '3495DB': 'Logitec Corporation', '9CB793': 'Creatcomm Technology Inc.', '5C335C': 'Swissphone Telecom AG', '70E027': 'HONGYU COMMUNICATION TECHNOLOGY LIMITED', 'FC35E6': 'Visteon corp', '04DF69': 'Car Connectivity Consortium', '78DAB3': 'GBO Technology', '700FEC': 'Poindus Systems Corp.', 'F02405': 'OPUS High Technology Corporation', 'D41090': 'iNFORM Systems AG', '78D5B5': 'NAVIELEKTRO KY', 'E47D5A': 'Beijing Hanbang Technology Corp.', 'E4F7A1': 'Datafox GmbH', '105C3B': 'Perma-Pipe, Inc.', '349D90': 'Heinzmann GmbH & CO. KG', 'D862DB': 'Eno Inc.', 'C47DFE': 'A.N. Solutions GmbH', 'CCBD35': 'Steinel GmbH', '6CECA1': 'SHENZHEN CLOU ELECTRONICS CO. LTD.', 'B03850': 'Nanjing CAS-ZDC IOT SYSTEM CO.,LTD', '748E08': 'Bestek Corp.', '78F5E5': 'BEGA Gantenbrink-Leuchten KG', '8C3C07': 'Skiva Technologies, Inc.', '38A86B': 'Orga BV', 'F07765': 'Sourcefire, Inc', '1441E2': 'Monaco Enterprises, Inc.', 'ECD040': 'GEA Farm Technologies GmbH', 'F80DEA': 'ZyCast Technology Inc.', 'B08807': 'Strata Worldwide', '249504': 'SFR', 'F45842': 'Boxx TV Ltd', '106682': 'NEC Platforms, Ltd.', '246278': 'sysmocom - systems for mobile communications GmbH', 'DC647C': 'C.R.S. iiMotion GmbH', '148692': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'A8154D': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F084C9': 'zte corporation', 'D4016D': 'TP-LINK TECHNOLOGIES CO.,LTD.', '985C93': 'SBG Systems SAS', 'A08A87': 'HuiZhou KaiYue Electronic Co.,Ltd', '28CD9C': 'Shenzhen Dynamax Software Development Co.,Ltd.', 'C0C3B6': 'Automatic Systems', 'A0EB76': 'AirCUVE Inc.', 'FC4499': 'Swarco LEA d.o.o.', '5CF370': 'CC&C Technologies, Inc', 'A4E0E6': 'FILIZOLA S.A. PESAGEM E AUTOMACAO', '381766': 'PROMZAKAZ LTD.', '18E8DD': 'MODULETEK', 'D073D5': 'LIFI LABS MANAGEMENT PTY LTD', '149448': 'BLU CASTLE S.A.', '48F925': 'Maestronic', '386793': 'Asia Optical Co., Inc.', '0C8268': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'D81EDE': 'B&W Group Ltd', '24EA40': 'Helmholz GmbH & Co. 
KG', 'D429EA': 'Zimory GmbH', '34ADE4': 'Shanghai Chint Power Systems Co., Ltd.', '3C94D5': 'Juniper Networks', '68831A': 'Pandora Mobility Corporation', 'FCDB96': 'ENERVALLEY CO., LTD', '1423D7': 'EUTRONIX CO., LTD.', '90DA4E': 'AVANU', '281878': 'Microsoft Corporation', '7038B4': 'Low Tech Solutions', 'DC6F08': 'Bay Storage Technology', '745F00': 'Samsung Semiconductor Inc.', 'E0C3F3': 'zte corporation', '5C20D0': 'Asoni Communication Co., Ltd.', 'ACA430': 'Peerless AV', '504F94': 'Loxone Electronics GmbH', '60B185': 'ATH system', '48F230': 'Ubizcore Co.,LTD', '78324F': 'Millennium Group, Inc.', '384369': 'Patrol Products Consortium LLC', '847A88': 'HTC Corporation', 'A4D856': 'Gimbal, Inc', '785517': 'SankyuElectronics', 'B47F5E': 'Foresight Manufacture (S) Pte Ltd', 'A0FE91': 'AVAT Automation GmbH', '74ECF1': 'Acumen', '5809E5': 'Kivic Inc.', 'BC629F': 'Telenet Systems P. Ltd.', '380FE4': 'Dedicated Network Partners Oy', '44184F': 'Fitview', '84ACA4': 'Beijing Novel Super Digital TV Technology Co., Ltd', '541FD5': 'Advantage Electronics', 'ACE97F': 'IoT Tech Limited', 'B85AF7': 'Ouya, Inc', '34F62D': 'SHARP Corporation', '4C8FA5': 'Jastec', '84ED33': 'BBMC Co.,Ltd', 'D0D6CC': 'Wintop', '58D071': 'BW Broadcast', '1C52D6': 'FLAT DISPLAY TECHNOLOGY CORPORATION', 'D0DFB2': 'Genie Networks Limited', '386645': 'OOSIC Technology CO.,Ltd', 'C0B339': 'Comigo Ltd.', 'E82E24': 'Out of the Fog Research LLC', '80FA5B': 'CLEVO CO.', '20858C': 'Assa', '9CE1D6': 'Junger Audio-Studiotechnik GmbH', '48B9C2': 'Teletics Inc.', 'D8182B': 'Conti Temic Microelectronic GmbH', '80CF41': 'Lenovo Mobile Communication Technology Ltd.', '58D6D3': 'Dairy Cheq Inc', '046E49': 'TaiYear Electronic Technology (Suzhou) Co., Ltd', 'B49DB4': 'Axion Technologies Inc.', 'ACE87E': 'Bytemark Computer Consulting Ltd', '60CDC5': 'Taiwan Carol Electronics., Ltd', 'E85AA7': 'LLC Emzior', '9C9C1D': 'Starkey Labs Inc.', 'AC3CB4': 'Nilan A/S', '2C3BFD': 'Netstor Technology Co., Ltd.', '080FFA': 'KSP INC.', 'DCB058': 'Bürkert Werke GmbH', '6C5A34': 'Shenzhen Haitianxiong Electronic Co., Ltd.', '9038DF': 'Changzhou Tiannengbo System Co. 
Ltd.', '185253': 'Pixord Corporation', '8007A2': 'Esson Technology Inc.', 'C0A0E2': 'Eden Innovations', 'E8ABFA': 'Shenzhen Reecam Tech.Ltd.', '6C9AC9': 'Valentine Research, Inc.', '303294': 'W-IE-NE-R Plein & Baus GmbH', '7C822D': 'Nortec', '10FBF0': 'KangSheng LTD.', 'ACA22C': 'Baycity Technologies Ltd', '683B1E': 'Countwise LTD', 'AC8D14': 'Smartrove Inc', 'AC7236': 'Lexking Technology Co., Ltd.', '3CD7DA': 'SK Mtek microelectronics(shenzhen)limited', '04F8C2': 'Flaircomm Microelectronics, Inc.', '5887E2': 'Shenzhen Coship Electronics Co., Ltd.', 'F46DE2': 'zte corporation', '808287': 'ATCOM Technology Co.Ltd.', '28A186': 'enblink', '60748D': 'Atmaca Elektronik', 'B8B7D7': '2GIG Technologies', '78D129': 'Vicos', '6869F2': 'ComAp s.r.o.', 'B85AFE': 'Handaer Communication Technology (Beijing) Co., Ltd', 'F0ACA4': 'HBC-radiomatic', '388EE7': 'Fanhattan LLC', '2091D9': "I'M SPA", '141BF0': 'Intellimedia Systems Ltd', '503955': 'Cisco SPVTG', '84DF0C': 'NET2GRID BV', '78AB60': 'ABB Australia', '8482F4': 'Beijing Huasun Unicreate Technology Co., Ltd', '5CD41B': 'UCZOON Technology Co., LTD', '2CEDEB': 'Alpheus Digital Company Limited', '00DB1E': 'Albedo Telecom SL', '34AF2C': 'Nintendo Co., Ltd.', 'CCE798': 'My Social Stuff', 'A036F0': 'Comprehensive Power', '180CAC': 'CANON INC.', '18863A': 'DIGITAL ART SYSTEM', '0CD996': 'Cisco Systems, Inc', '30F33A': '+plugg srl', '98291D': 'Jaguar de Mexico, SA de CV', '0CDCCC': 'Inala Technologies', 'A4B1E9': 'Technicolor', '60455E': 'Liptel s.r.o.', 'D806D1': 'Honeywell Fire System (Shanghai) Co,. Ltd.', '647657': 'Innovative Security Designs', '7CD9FE': 'New Cosmos Electric Co., Ltd.', 'E49069': 'Rockwell Automation', 'CCC104': 'Applied Technical Systems', '4C72B9': 'PEGATRON CORPORATION', '907025': 'Garea Microsys Co.,Ltd.', '10D1DC': 'INSTAR Deutschland GmbH', 'BC811F': 'Ingate Systems', 'D867D9': 'Cisco Systems, Inc', '944A09': 'BitWise Controls', 'BC28D6': 'Rowley Associates Limited', '10BD18': 'Cisco Systems, Inc', '5869F9': 'Fusion Transactive Ltd.', 'D41E35': 'TOHO Electronics INC.', '98A7B0': 'MCST ZAO', '642216': 'Shandong Taixin Electronic co.,Ltd', '34996F': 'VPI Engineering', '4C068A': 'Basler Electric Company', 'D08CFF': 'UPWIS AB', '68CE4E': 'L-3 Communications Infrared Products', '68D1FD': 'Shenzhen Trimax Technology Co.,Ltd', '9C066E': 'Hytera Communications Corporation Limited', '048B42': 'Skspruce Technologies', '5076A6': 'Ecil Informatica Ind. Com. Ltda', 'A44C11': 'Cisco Systems, Inc', '60843B': 'Soladigm, Inc.', 'F8A03D': 'Dinstar Technologies Co., Ltd.', '2CD444': 'FUJITSU LIMITED', '0C57EB': 'Mueller Systems', '745327': 'COMMSEN CO., LIMITED', 'E856D6': 'NCTech Ltd', 'C08170': 'Effigis GeoSolutions', '60B933': 'Deutron Electronics Corp.', '8CC7AA': 'Radinet Communications Inc.', '40336C': 'Godrej & Boyce Mfg. co. ltd', '9C611D': 'Omni-ID USA, Inc.', '489153': 'Weinmann Geräte für Medizin GmbH + Co. 
KG', 'AC9403': 'Envision Peripherals Inc', '443839': 'Cumulus Networks, inc', 'A4E731': 'Nokia Corporation', '3CEAFB': 'NSE AG', '0036F8': 'Conti Temic microelectronic GmbH', 'A4F7D0': 'LAN Accessories Co., Ltd.', 'AC4BC8': 'Juniper Networks', '209BA5': 'JIAXING GLEAD Electronics Co.,Ltd', 'A0F450': 'HTC Corporation', '68D925': 'ProSys Development Services', '848D84': 'Rajant Corporation', 'D8337F': 'Office FA.com Co.,Ltd.', '6089B1': 'Key Digital Systems', '44D15E': 'Shanghai Kingto Information Technology Ltd', '0036FE': 'SuperVision', '54466B': 'Shenzhen CZTIC Electronic Technology Co., Ltd ', '44B382': 'Kuang-chi Institute of Advanced Technology', '709E86': 'X6D Limited', 'A0F419': 'Nokia Corporation', 'D4EC0C': 'Harley-Davidson Motor Company', '6CA96F': 'TransPacket AS', 'AC0142': 'Uriel Technologies SIA', '542A9C': 'LSY Defense, LLC.', '1C973D': 'PRICOM Design', 'BC0200': 'Stewart Audio', 'C47BA3': 'NAVIS Inc.', 'F44848': 'Amscreen Group Ltd', '50D274': 'Steffes Corporation', '0043FF': 'KETRON S.R.L.', '7CACB2': 'Bosch Software Innovations GmbH', '18D66A': 'Inmarsat', '28E608': 'Tokheim', '74FF7D': 'Wren Sound Systems, LLC', 'ACF0B2': 'Becker Electronics Taiwan Ltd.', 'C85645': 'Intermas France', 'F85063': 'Verathon', 'F0D14F': 'LINEAR LLC', '0CA138': 'Blinq Wireless Inc.', '5C6F4F': 'S.A. SISTEL', '901B0E': 'Fujitsu Technology Solutions GmbH', '3C3888': 'ConnectQuest, llc', '48D7FF': 'BLANKOM Antennentechnik GmbH', 'C47130': 'Fon Technology S.L.', '504A5E': 'Masimo Corporation', '1C7C45': 'Vitek Industrial Video Products, Inc.', '44348F': 'MXT INDUSTRIAL LTDA', 'C8F704': 'Building Block Video', '508A42': 'Uptmate Technology Co., LTD', 'BCEA2B': 'CityCom GmbH', '2C36F8': 'Cisco Systems, Inc', '845787': 'DVR C&C Co., Ltd.', '5808FA': 'Fiber Optic & telecommunication INC.', 'AC3D05': 'Instorescreen Aisa', '0C9E91': 'Sankosha Corporation', '383F10': 'DBL Technology Ltd.', 'ACD364': 'ABB SPA, ABB SACE DIV.', '286094': 'CAPELEC', 'A45630': 'Cisco Systems, Inc', 'C8F9F9': 'Cisco Systems, Inc', 'B826D4': 'Furukawa Industrial S.A. Produtos Elétricos', 'B87447': 'Convergence Technologies', '7463DF': 'VTS GmbH', 'BC125E': 'Beijing WisVideo INC.', '14E4EC': 'mLogic LLC', '3828EA': 'Fujian Netcom Technology Co., LTD', 'A086EC': 'SAEHAN HITEC Co., Ltd', '942E17': 'Schneider Electric Canada Inc', '045A95': 'Nokia Corporation', '04F4BC': 'Xena Networks', '6C3A84': 'Shenzhen Aero-Startech. Co.Ltd', 'D01AA7': 'UniPrint', '846AED': 'Wireless Tsukamoto.,co.LTD', 'E05DA6': 'Detlef Fink Elektronik & Softwareentwicklung', '80DB31': 'Power Quotient International Co., Ltd.', '1C51B5': 'Techaya LTD', '00D632': 'GE Energy', 'C43C3C': 'CYBELEC SA', '9CB008': 'Ubiquitous Computing Technology Corporation', 'A8776F': 'Zonoff', '648788': 'Juniper Networks', '00FA3B': 'CLOOS ELECTRONIC GMBH', '2CEE26': 'Petroleum Geo-Services', 'A4EF52': 'Telewave Co., Ltd.', 'A826D9': 'HTC Corporation', '28940F': 'Cisco Systems, Inc', 'B8DAF7': 'Advanced Photonics, Inc.', '143AEA': 'Dynapower Company LLC', 'B4D8A9': 'BetterBots', 'CC944A': 'Pfeiffer Vacuum GmbH', '0C8525': 'Cisco Systems, Inc', '98FE03': 'Ericsson - North America', '20AA4B': 'Cisco-Linksys, LLC', '2838CF': 'Gen2wave', '7CC8D7': 'Damalisk', 'C46044': 'Everex Electronics Limited', 'E03C5B': 'SHENZHEN JIAXINJIE ELECTRON CO.,LTD', '24BC82': 'Dali Wireless, Inc.', '087572': 'Obelux Oy', '60B606': 'Phorus', '9092B4': 'Diehl BGT Defence GmbH & Co. 
KG', 'FC455F': 'JIANGXI SHANSHUI OPTOELECTRONIC TECHNOLOGY CO.,LTD', 'F04A2B': 'PYRAMID Computer GmbH', '9C53CD': 'ENGICAM s.r.l.', '608645': 'Avery Weigh-Tronix, LLC', 'FC8FC4': 'Intelligent Technology Inc.', '541DFB': 'Freestyle Energy Ltd', '10C2BA': 'UTT Co., Ltd.', '90D74F': 'Bookeen', '64C5AA': 'South African Broadcasting Corporation', 'CC6DEF': 'TJK Tietolaite Oy', '98AAD7': 'BLUE WAVE NETWORKING CO LTD', '10FC54': 'Shany Electronic Co., Ltd. ', 'C02973': 'Audyssey Laboratories Inc.', '78FE3D': 'Juniper Networks', 'E039D7': 'Plexxi, Inc.', '500B32': 'Foxda Technology Industrial(ShenZhen)Co.,LTD', '24C0B3': 'RSF', '603553': 'Buwon Technology', 'A85BF3': 'Audivo GmbH', 'B8975A': "BIOSTAR Microtech Int'l Corp.", '4833DD': 'ZENNIO AVANCE Y TECNOLOGIA, S.L.', '28D1AF': 'Nokia Corporation', '48A6D2': 'GJsun Optical Science and Tech Co.,Ltd.', '7C336E': 'MEG Electronics Inc.', '182B05': '8D Technologies', '08A12B': 'ShenZhen EZL Technology Co., Ltd', 'B40C25': 'Palo Alto Networks', '64E84F': 'Serialway Communication Technology Co. Ltd', '2C9EFC': 'CANON INC.', 'F8F7D3': 'International Communications Corporation', '302DE8': 'JDA, LLC (JDA Systems)', 'D4E33F': 'Nokia', '147DC5': 'Murata Manufacturing Co., Ltd.', '00B9F6': 'Shenzhen Super Rich Electronics Co.,Ltd', 'FCC23D': 'Atmel Corporation', '88E7A6': 'iKnowledge Integration Corp.', '28AF0A': 'Sirius XM Radio Inc', '5CD4AB': 'Zektor', '08FC52': 'OpenXS BV', 'F8462D': 'SYNTEC Incorporation', '78A5DD': 'Shenzhen Smarteye Digital Electronics Co., Ltd', 'ECE744': 'Omntec mfg. inc', '4C32D9': 'M Rutty Holdings Pty. Ltd.', '68CD0F': 'U Tek Company Limited', '0462D7': 'ALSTOM HYDRO FRANCE', 'D4507A': 'CEIVA Logic, Inc', '603FC5': 'COX CO., LTD', 'A4E391': 'DENY FONTAINE', 'AC6FD9': 'Valueplus Inc.', 'DC1EA3': 'Accensus LLC', 'A40130': 'ABIsystems Co., LTD', '90A783': 'JSW PACIFIC CORPORATION ', 'A446FA': 'AmTRAN Video Corporation', 'CCE7DF': 'American Magnetics, Inc.', '2804E0': 'FERMAX ELECTRONICA S.A.U.', 'A00CA1': 'SKTB SKiT', '10F9EE': 'Nokia Corporation', '742B0F': 'Infinidat Ltd.', 'C8F981': 'Seneca s.r.l.', '20EEC6': 'Elefirst Science & Tech Co ., ltd', 'E01E07': 'Anite Telecoms US. Inc', '7C6B33': 'Tenyu Tech Co. Ltd.', '64D989': 'Cisco Systems, Inc', 'D4024A': 'Delphian Systems LLC', '0041B4': 'Wuxi Zhongxing Optoelectronics Technology Co.,Ltd.', 'F44450': 'BND Co., Ltd.', 'EC4670': 'Meinberg Funkuhren GmbH & Co. KG', '644346': 'GuangDong Quick Network Computer CO.,LTD', '240BB1': 'KOSTAL Industrie Elektrik GmbH ', '14307A': 'Avermetrics', '645DD7': 'Shenzhen Lifesense Medical Electronics Co., Ltd. ', 'A06CEC': 'RIM', 'D05A0F': 'I-BT DIGITAL CO.,LTD', 'EC9681': '2276427 Ontario Inc', '5C076F': 'Thought Creator', '3C0FC1': 'KBC Networks', '58E636': 'EVRsafe Technologies', '24497B': 'Innovative Converged Devices Inc', '98E79A': 'Foxconn(NanJing) Communication Co.,Ltd.', 'A0E9DB': 'Ningbo FreeWings Technologies Co.,Ltd', '788973': 'CMC', '203706': 'Cisco Systems, Inc', '7CF429': 'NUUO Inc. ', 'F4B164': 'Lightning Telecommunications Technology Co. Ltd', '70B035': 'Shenzhen Zowee Technology Co., Ltd', '8821E3': 'Nebusens, S.L.', '90B97D': 'Johnson Outdoors Marine Electronics d/b/a Minnkota', '1045BE': 'Norphonic AS', 'AC8ACD': 'ROGER D.Wensker, G.Wensker sp.j.', '984246': 'SOL INDUSTRY PTE., LTD', '3429EA': 'MCD ELECTRONICS SP. Z O.O.', '28A574': 'Miller Electric Mfg. 
Co.', '8C82A8': 'Insigma Technology Co.,Ltd', 'CCB55A': 'Fraunhofer ITWM', 'AC4723': 'Genelec', 'E8BA70': 'Cisco Systems, Inc', 'D4A425': 'SMAX Technology Co., Ltd.', '60190C': 'RRAMAC', 'D05FCE': 'Hitachi Data Systems', 'F80332': 'Khomp', '8C11CB': 'ABUS Security-Center GmbH & Co. KG', 'D09B05': 'Emtronix', '281471': 'Lantis co., LTD.', '24470E': 'PentronicAB', '68876B': 'INQ Mobile Limited', '90B8D0': 'Joyent, Inc.', '8C4435': 'Shanghai BroadMobi Communication Technology Co., Ltd.', 'F81D93': 'Longdhua(Beijing) Controls Technology Co.,Ltd', '1CAA07': 'Cisco Systems, Inc', '685B36': 'POWERTECH INDUSTRIAL CO., LTD.', 'CCF841': 'Lumewave', 'FC8329': 'Trei technics', '14EB33': 'BSMediasoft Co., Ltd.', 'F4B549': 'Xiamen Yeastar Information Technology Co., Ltd.', '88B168': 'Delta Control GmbH', 'AC8674': 'Open Mesh, Inc.', '94D93C': 'ENELPS', 'B8BEBF': 'Cisco Systems, Inc', '64B64A': 'ViVOtech, Inc.', '28EE2C': 'Frontline Test Equipment', '782EEF': 'Nokia Corporation', '7CF0BA': 'Linkwell Telesystems Pvt Ltd', 'D8DF0D': 'beroNet GmbH', '204005': 'feno GmbH', 'D46F42': 'WAXESS USA Inc', '300B9C': 'Delta Mobile Systems, Inc.', '04C5A4': 'Cisco Systems, Inc', '64D241': 'Keith & Koep GmbH', '18B79E': 'Invoxia', 'ACF97E': 'ELESYS INC.', '38D135': 'EasyIO Corporation Sdn. Bhd.', '6CAD3F': 'Hubbell Building Automation, Inc.', '901900': 'SCS SA', 'D45D42': 'Nokia Corporation', 'B03829': 'Siliconware Precision Industries Co., Ltd.', '7C6C39': 'PIXSYS SRL', 'BC2846': 'NextBIT Computing Pvt. Ltd.', 'BC0F2B': 'FORTUNE TECHGROUP CO.,LTD', '18B3BA': 'Netlogic AB', '8C5FDF': 'Beijing Railway Signal Factory', 'D47B75': 'HARTING Electronics GmbH', 'B01B7C': 'Ontrol A.S.', '648125': 'Alphatron Marine BV', '8CF9C9': 'MESADA Technology Co.,Ltd.', '042605': 'GFR Gesellschaft für Regelungstechnik und Energieeinsparung mbH', '44E4D9': 'Cisco Systems, Inc', 'ACCA54': 'Telldus Technologies AB', '94E226': 'D. 
ORtiz Consulting, LLC', '30EB25': 'INTEK DIGITAL', '24F0FF': 'GHT Co., Ltd.', '9CC0D2': 'Conductix-Wampfler GmbH', 'C0626B': 'Cisco Systems, Inc', 'EC986C': 'Lufft Mess- und Regeltechnik GmbH', 'D093F8': 'Stonestreet One LLC', '9C645E': 'Harman Consumer Group', '1C334D': 'ITS Telecom', 'D8C99D': 'EA DISPLAY LIMITED', '1083D2': 'Microseven Systems, LLC', '34684A': 'Teraworks Co., Ltd.', 'CCFC6D': 'RIZ TRANSMITTERS', 'E03E7D': 'data-complex GmbH', '0CC6AC': 'DAGS', '303955': 'Shenzhen Jinhengjia Electronic Co., Ltd.', 'FC5B24': 'Weibel Scientific A/S', '503DE5': 'Cisco Systems, Inc', '540496': 'Gigawave LTD', 'CCF67A': 'Ayecka Communication Systems LTD', '4CB9C8': 'CONET CO., LTD.', '8065E9': 'BenQ Corporation', 'DCD87F': 'Shenzhen JoinCyber Telecom Equipment Ltd', 'B4E0CD': 'Fusion-io, Inc', '286046': 'Lantech Communications Global, Inc.', '10E2D5': 'Qi Hardware Inc.', '20FECD': 'System In Frontier Inc.', 'F0933A': 'NxtConect', '60C980': 'Trymus', 'A036FA': 'Ettus Research LLC', 'EC836C': 'RM Tech Co., Ltd.', '70DDA1': 'Tellabs', '94D019': 'Cydle Corp.', '8C278A': 'Vocollect Inc', '78593E': 'RAFI GmbH & Co.KG', '509772': 'Westinghouse Digital', 'E41C4B': 'V2 TECHNOLOGY, INC.', '5CF3FC': 'IBM Corp', 'EC4644': 'TTK SAS', '8CB64F': 'Cisco Systems, Inc', '204AAA': 'Hanscan Spain S.A.', '500E6D': 'TrafficCast International', 'EC3BF0': 'NovelSat', '4CEDDE': 'ASKEY COMPUTER CORP', 'E8E08F': 'GRAVOTECH MARKING SAS', 'B8415F': 'ASP AG', '2CB69D': 'RED Digital Cinema', 'ACAB8D': 'Lyngso Marine A/S', '6083B2': 'GkWare e.K.', '80D019': 'Embed, Inc', '68EBC5': 'Angstrem Telecom', 'A0B5DA': 'HongKong THTF Co., Ltd', '8886A0': 'Simton Technologies, Ltd.', 'A45055': 'BUSWARE.DE', '582F42': 'Universal Electric Corporation', '0474A1': 'Aligera Equipamentos Digitais Ltda', '5C6984': 'NUVICO', 'CC0CDA': 'Miljovakt AS', 'A86A6F': 'RIM', 'A89B10': 'inMotion Ltd.', 'B41489': 'Cisco Systems, Inc', '68122D': 'Special Instrument Development Co., Ltd.', '94F720': 'Tianjin Deviser Electronics Instrument Co., Ltd', 'DC9C52': 'Sapphire Technology Limited.', '4891F6': 'Shenzhen Reach software technology CO.,LTD', '649B24': 'V Technology Co., Ltd.', '846EB1': 'Park Assist LLC', '6C504D': 'Cisco Systems, Inc', 'B8D06F': 'GUANGZHOU HKUST FOK YING TUNG RESEARCH INSTITUTE', 'EC14F6': 'BioControl AS', 'E8995A': 'PiiGAB, Processinformation i Goteborg AB', '401D59': 'Biometric Associates, LP', 'B8FF6F': 'Shanghai Typrotech Technology Co.Ltd', '1CBD0E': 'Amplified Engineering Pty Ltd', 'A0F217': 'GE Medical System(China) Co., Ltd. ', 'F0A764': 'GST Co., Ltd.', '1C0656': 'IDY Corporation', '5C17D3': 'LGE ', '1CDF0F': 'Cisco Systems, Inc', '68BDAB': 'Cisco Systems, Inc', '9CADEF': 'Obihai Technology, Inc.', '7076F0': 'LevelOne Communications (India) Private Limited', '8CE7B3': 'Sonardyne International Ltd', '088DC8': 'Ryowa Electronics Co.,Ltd', 'F8B599': 'Guangzhou CHNAVS Digital Technology Co.,Ltd', 'B8921D': 'BG T&A', '80C6CA': 'Endian s.r.l.', 'C88B47': 'Nolangroup S.P.A con Socio Unico', 'C4CD45': 'Beijing Boomsense Technology CO.,LTD.', '54FDBF': 'Scheidt & Bachmann GmbH', 'D08999': 'APCON, Inc.', 'C88447': 'Beautiful Enterprise Co., Ltd', 'E061B2': 'HANGZHOU ZENOINTEL TECHNOLOGY CO., LTD', 'D0BB80': 'SHL Telemedicine International Ltd.', 'A4A80F': 'Shenzhen Coship Electronics Co., Ltd.', '94E711': 'Xirka Dama Persada PT', '90903C': 'TRISON TECHNOLOGY CORPORATION', '9411DA': 'ITF Fröschl GmbH', '8039E5': 'PATLITE CORPORATION', '14FEAF': 'SAGITTAR LIMITED', 'C02BFC': 'iNES. 
applied informatics GmbH', '94C7AF': 'Raylios Technology', 'D81C14': 'Compacta International, Ltd.', '008C10': 'Black Box Corp.', '641E81': 'Dowslake Microsystems', 'EC542E': 'Shanghai XiMei Electronic Technology Co. Ltd', 'F0E5C3': 'Drägerwerk AG & Co. KG aA', 'DC7B94': 'Cisco Systems, Inc', '5CCA32': 'Theben AG', 'D48FAA': 'Sogecam Industrial, S.A.', 'B081D8': 'I-sys Corp', '6C9B02': 'Nokia Corporation', '7415E2': 'Tri-Sen Systems Corporation', 'ECC38A': 'Accuenergy (CANADA) Inc', 'D82986': 'Best Wish Technology LTD', '446132': 'ecobee inc', 'E05B70': 'Innovid, Co., Ltd.', '043604': 'Gyeyoung I&T', 'A4B2A7': 'Adaxys Solutions AG', 'D0D0FD': 'Cisco Systems, Inc', 'B0B8D5': 'Nanjing Nengrui Auto Equipment CO.,Ltd', 'D84606': 'Silicon Valley Global Marketing', '40520D': 'Pico Technology', '807D1B': 'Neosystem Co. Ltd.', '34E0D7': 'DONGGUAN QISHENG ELECTRONICS INDUSTRIAL CO., LTD', 'F41F0B': 'YAMABISHI Corporation', 'A082C7': 'P.T.I Co.,LTD', '988EDD': 'TE Connectivity Limerick', '98FC11': 'Cisco-Linksys, LLC', 'D466A8': 'Riedo Networks Ltd', 'FCD4F6': 'Messana Air.Ray Conditioning s.r.l.', '78A714': 'Amphenol', 'F450EB': 'Telechips Inc', 'E87AF3': 'S5 Tech S.r.l.', '141BBD': 'Volex Inc.', 'CC5C75': 'Weightech Com. Imp. Exp. Equip. Pesagem Ltda', '1C6F65': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '90E0F0': 'IEEE 1722a Working Group', 'F893F3': 'VOLANS', '7866AE': 'ZTEC Instruments, Inc.', '4C022E': 'CMR KOREA CO., LTD', '34AAEE': 'Mikrovisatos Servisas UAB', '48FCB8': 'Woodstream Corporation', 'A4AE9A': 'Maestro Wireless Solutions ltd.', 'C848F5': 'MEDISON Xray Co., Ltd', '1880CE': 'Barberry Solutions Ltd', '74B9EB': 'JinQianMao Technology Co.,Ltd.', 'F8AC6D': 'Deltenna Ltd', '18B209': 'Torrey Pines Logic, Inc', 'D4000D': 'Phoenix Broadband Technologies, LLC.', 'AC5135': 'MPI TECH', '44D63D': 'Talari Networks', 'D45297': 'nSTREAMS Technologies, Inc.', 'D84B2A': 'Cognitas Technologies, Inc.', '684B88': 'Galtronics Telemetry Inc.', 'A4561B': 'MCOT Corporation', '80C63F': 'Remec Broadband Wireless , LLC', '40D40E': 'Biodata Ltd', '0C826A': 'Wuhan Huagong Genuine Optics Technology Co., Ltd', '7C051E': 'RAFAEL LTD.', 'E0271A': 'TTC Next-generation Home Network System WG', '0097FF': 'Heimann Sensor GmbH', 'E4AB46': 'UAB Selteka', '945B7E': 'TRILOBIT LTDA.', '04E548': 'Cohda Wireless Pty Ltd', '7071BC': 'PEGATRON CORPORATION', '7884EE': 'INDRA ESPACIO S.A.', '08F6F8': 'GET Engineering', '78A2A0': 'Nintendo Co., Ltd.', '10090C': 'Janome Sewing Machine Co., Ltd.', 'E01CEE': 'Bravo Tech, Inc.', '2893FE': 'Cisco Systems, Inc', 'F4C795': 'WEY Elektronik AG', '781185': 'NBS Payment Solutions Inc.', '24B6B8': 'FRIEM SPA', 'D81BFE': 'TWINLINX CORPORATION', 'FC4463': 'Universal Audio, Inc', '042F56': 'ATOCS (Shenzhen) LTD', 'E85E53': 'Infratec Datentechnik GmbH', '88BA7F': 'Qfiednet Co., Ltd.', '64DB18': 'OpenPattern', '90A2DA': 'GHEO SA', '9889ED': 'Anadem Information Inc.', 'D05875': 'Active Control Technology Inc.', 'C8EF2E': 'Beijing Gefei Tech. Co., Ltd ', '102D96': 'Looxcie Inc.', 'EC8EAD': 'DLX', '34C69A': 'Enecsys Ltd', 'B8653B': 'Bolymin, Inc.', '38E8DF': 'b gmbh medien + datenbanken', '1C129D': 'IEEE PES PSRC/SUB ', 'E0CA4D': 'Shenzhen Unistar Communication Co.,LTD', '60B3C4': 'Elber Srl', '04C880': 'Samtec Inc', '0CC9C6': 'Samwin Hong Kong Limited', '1062C9': 'Adatis GmbH & Co. 
KG', 'D8AE90': 'Itibia Technologies', '7830E1': 'UltraClenz, LLC', 'B09134': 'Taleo', 'A4C2AB': 'Hangzhou LEAD-IT Information & Technology Co.,Ltd', '48AA5D': 'Store Electronic Systems', 'F88DEF': 'Tenebraex', '042234': 'Wireless Standard Extensions', '80C862': 'Openpeak, Inc', '5C35DA': 'There Corporation Oy', '3C4C69': 'Infinity System S.L.', 'A06986': 'Wellav Technologies Ltd', 'F0B6EB': 'Poslab Technology Co., Ltd.', '5850E6': 'Best Buy Corporation', '3C1CBE': 'JADAK LLC', 'BCD5B6': 'd2d technologies', 'FC683E': 'Directed Perception, Inc', '28E794': 'Microtime Computer Inc.', '0CD502': 'Westell Technologies Inc.', '70828E': 'OleumTech Corporation', '1C8F8A': 'Phase Motion Control SpA', 'FCCCE4': 'Ascon Ltd.', 'A438FC': 'Plastic Logic', '18FC9F': 'Changhe Electronics Co., Ltd.', '94592D': 'EKE Building Technology Systems Ltd', 'CC69B0': 'Global Traffic Technologies, LLC', 'A0593A': 'V.D.S. Video Display Systems srl', 'CCEA1C': 'DCONWORKS Co., Ltd', '7C08D9': 'Shanghai B-Star Technology Co', '2059A0': 'Paragon Technologies Inc.', '34CE94': 'Parsec (Pty) Ltd', '24D2CC': 'SmartDrive Systems Inc.', '0CEF7C': 'AnaCom Inc', 'D0D286': 'Beckman Coulter K.K.', 'A0BFA5': 'CORESYS', 'B05B1F': 'THERMO FISHER SCIENTIFIC S.P.A.', '34EF8B': 'NTT Communications Corporation', '687F74': 'Cisco-Linksys, LLC', 'ECE9F8': 'Guang Zhou TRI-SUN Electronics Technology Co., Ltd', '38E98C': 'Reco S.p.A.', 'C4198B': 'Dominion Voting Systems Corporation', 'C83A35': 'Tenda Technology Co., Ltd.', '6C8CDB': 'Otus Technologies Ltd', '40F52E': 'Leica Microsystems (Schweiz) AG', 'E4FFDD': 'ELECTRON INDIA', '68A1B7': 'Honghao Mingchuan Technology (Beijing) CO.,Ltd.', '0CD7C2': 'Axium Technologies, Inc.', 'E84ECE': 'Nintendo Co., Ltd.', '1045F8': 'LNT-Automation GmbH', 'DCE71C': 'AUG Elektronik GmbH', 'A870A5': 'UniComm Inc.', 'F8472D': 'X2gen Digital Corp. Ltd', '849000': 'Arnold & Richter Cine Technik', '08184C': 'A. S. Thomas, Inc.', '10880F': 'Daruma Telecomunicações e Informática S.A.', 'FC6198': 'NEC Personal Products, Ltd', '74D850': 'Evrisko Systems', '54B620': 'SUHDOL E&C Co.Ltd.', '78C40E': 'H&D Wireless', '2C0623': 'Win Leader Inc.', '0C2755': 'Valuable Techologies Limited', '78998F': 'MEDILINE ITALIA SRL', '40ECF8': 'Siemens AG', 'BCB181': 'SHARP CORPORATION', 'C8873B': 'Net Optics', 'A8CE90': 'CVC', 'E41F13': 'IBM Corp', '64C6AF': 'AXERRA Networks Ltd', '701AED': 'ADVAS CO., LTD.', '6465C0': 'Nuvon, Inc', '44568D': 'PNC Technologies Co., Ltd.', '584CEE': 'Digital One Technologies, Limited', 'A07332': 'Cashmaster International Limited', '44C9A2': 'Greenwald Industries', '406186': "MICRO-STAR INT'L CO.,LTD", '7C1EB3': '2N TELEKOMUNIKACE a.s.', '4456B7': 'Spawn Labs, Inc', '906DC8': 'DLG Automação Industrial Ltda', '002721': 'Shenzhen Baoan Fenda Industrial Co., Ltd', '0026FB': 'AirDio Wireless, Inc.', '0026F5': 'XRPLUS Inc.', '0026F8': 'Golden Highway Industry Development Co., Ltd.', '0026F4': 'Nesslab', '0026DF': 'TaiDoc Technology Corp.', '0026D8': 'Magic Point Inc.', '00271A': 'Geenovo Technology Ltd.', '002714': 'Grainmustards, Co,ltd.', '002715': 'Rebound Telecom. 
Co., Ltd', '00270F': 'Envisionnovation Inc', '00270A': 'IEE S.A.', '002709': 'Nintendo Co., Ltd.', '0026D6': 'Ningbo Andy Optoelectronic Co., Ltd.', '0026D3': 'Zeno Information System', '0026D1': 'S Squared Innovations Inc.', '0026EE': 'TKM GmbH', '0026EF': 'Technology Advancement Group, Inc.', '0026E6': 'Visionhitech Co., Ltd.', '002701': 'INCOstartec GmbH', '0026CB': 'Cisco Systems, Inc', '0026A4': 'Novus Produtos Eletronicos Ltda', '0026A2': 'Instrumentation Technology Systems', '00269F': 'Private', '002699': 'Cisco Systems, Inc', '0026B2': 'Setrix GmbH', '0026AF': 'Duelco A/S', '0026B3': 'Thales Communications Inc', '0026A7': 'CONNECT SRL', '002667': 'CARECOM CO.,LTD.', '002647': 'WFE TECHNOLOGY CORP.', '002653': 'DaySequerra Corporation', '00266A': 'ESSENSIUM NV', '00266B': 'SHINE UNION ENTERPRISE LIMITED', '00268D': 'CellTel S.p.A.', '002687': 'corega K.K', '00267B': 'GSI Helmholtzzentrum für Schwerionenforschung GmbH', '00262A': 'Proxense, LLC', '002628': 'companytec automação e controle ltda.', '00261F': 'SAE Magnetics (H.K.) Ltd.', '00261E': 'QINGBANG ELEC(SZ) CO., LTD', '0025EA': 'Iphion BV', '0025F0': 'Suga Electronics Limited', '002640': 'Baustem Broadband Technologies, Ltd.', '0025FC': 'ENDA ENDUSTRIYEL ELEKTRONIK LTD. STI.', '0025FA': 'J&M Analytik AG', '002633': 'MIR - Medical International Research', '002630': 'ACOREL S.A.S', '0025E8': 'Idaho Technology', '0025E4': 'OMNI-WiFi, LLC', '0025FE': 'Pilot Electronics Corporation', '002619': 'FRC', '0025D2': 'InpegVision Co., Ltd', '0025C9': 'SHENZHEN HUAPU DIGITAL CO., LTD', '002585': 'KOKUYO S&T Co., Ltd.', '002581': 'x-star networks Inc.', '00259B': 'Beijing PKUNITY Microsystems Technology Co., Ltd', '002596': 'GIGAVISION srl', '002595': 'Northwest Signal Supply, Inc', '00258F': 'Trident Microsystems, Inc.', '002589': 'Hills Industries Limited', '0025D7': 'CEDO', '0025D8': 'KOREA MAINTENANCE', '0025B5': 'Cisco Systems, Inc', '0025A0': 'Nintendo Co., Ltd.', '0025BE': 'Tektrap Systems Inc.', '002551': 'SE-Elektronic GmbH', '00254D': 'Singapore Technologies Electronics Limited', '00254C': 'Videon Central, Inc.', '00255B': 'CoachComm, LLC', '00257B': 'STJ ELECTRONICS PVT LTD', '00257C': 'Huachentel Technology Development Co., Ltd', '002575': 'FiberPlex Technologies, LLC', '002570': 'Eastern Communications Company Limited', '00256A': 'inIT - Institut Industrial IT', '002562': 'interbro Co. 
Ltd.', '002520': 'SMA Railway Technology GmbH', '002527': 'Bitrode Corp.', '002525': 'CTERA Networks Ltd.', '002516': 'Integrated Design Tools, Inc.', '002543': 'MONEYTECH', '00253A': 'CEVA, Ltd.', '0024F7': 'Cisco Systems, Inc', '0024F3': 'Nintendo Co., Ltd.', '0024B9': 'Wuhan Higheasy Electronic Technology Development Co.Ltd', '0024BD': 'Hainzl Industriesysteme GmbH', '002510': 'Pico-Tesla Magnetic Therapies', '002501': 'JSC Supertel', '0024E5': 'Seer Technology, Inc', '0024E2': 'HASEGAWA ELECTRIC CO.,LTD.', '0024E0': 'DS Tech, LLC', '0024B6': 'Seagate Technology', '0024C8': 'Broadband Solutions Group', '0024C5': 'Meridian Audio Limited', '002496': 'Ginzinger electronic systems', '002499': 'Aquila Technologies', '00248A': 'Kaga Electronics Co., Ltd.', '002447': 'Kaztek Systems', '002443': 'Nortel Networks', '002441': 'Wanzl Metallwarenfabrik GmbH', '002458': 'PA Bastion CC', '00245D': 'Terberg besturingstechniek B.V.', '002477': 'Tibbo Technology', '00246E': 'Phihong USA Corp.', '002473': '3COM EUROPE LTD', '00246B': 'Covia, Inc.', '0024A2': 'Hong Kong Middleware Technology Limited', '002450': 'Cisco Systems, Inc', '002451': 'Cisco Systems, Inc', '00244A': 'Voyant International', '002431': 'Uni-v co.,ltd', '002432': 'Neostar Technology Co.,LTD', '002430': 'Ruby Tech Corp.', '00242E': 'Datastrip Inc.', '002410': 'NUETEQ Technology,Inc.', '0023F9': 'Double-Take Software, INC.', '0023F5': 'WILO SE', '0023FF': 'Beijing HTTC Technology Ltd.', '0023F6': 'Softwell Technology Co., Ltd.', '0023F3': 'Glocom, Inc.', '0023F0': 'Shanghai Jinghan Weighing Apparatus Co. Ltd.', '0023E9': 'F5 Networks, Inc.', '00243B': 'CSSI (S) Pte Ltd', '002435': 'WIDE CORPORATION', '00241A': 'Red Beetle Inc.', '002414': 'Cisco Systems, Inc', '002408': 'Pacific Biosciences', '00240C': 'DELEC GmbH', '00241D': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '0023EA': 'Cisco Systems, Inc', '0023E5': 'IPaXiom Networks', '0023C0': 'Broadway Networks', '0023B6': 'SECURITE COMMUNICATIONS / HONEYWELL', '0023B8': 'Sichuan Jiuzhou Electronic Technology Co.,Ltd', '0023BA': 'Chroma', '0023BC': 'EQ-SYS GmbH', '0023A1': 'Trend Electronics Ltd', '0023A6': 'E-Mon', '0023A8': 'Marshall Electronics', '00239A': 'EasyData Hardware GmbH', '002396': 'ANDES TECHNOLOGY CORPORATION', '002394': 'Samjeon', '0023CC': 'Nintendo Co., Ltd.', '002379': 'Union Business Machines Co. Ltd.', '0023B1': 'Longcheer Technology (Singapore) Pte Ltd', '00238C': 'Private', '00234F': 'Luminous Power Technologies Pvt. Ltd.', '00234B': 'Inyuan Technology Inc.', '002349': 'Helmholtz Centre Berlin for Material and Energy', '002346': 'Vestac', '002344': 'Objective Interface Systems, Inc.', '002377': 'Isotek Electronics Ltd', '002371': 'SOAM Systel', '00230E': 'Gorba AG', '002307': 'FUTURE INNOVATION TECH CO.,LTD', '00235E': 'Cisco Systems, Inc', '002358': 'SYSTEL SA', '002356': 'Packet Forensics LLC', '002350': 'RDC, Inc. dba LynTec', '00232E': 'Kedah Electronics Engineering, LLC', '002320': 'Nicira Networks', '002322': 'KISS Teknical Solutions, Inc.', '002364': 'Power Instruments Pte Ltd', '002362': 'Goldline Controls', '002317': 'Lasercraft Inc', '00233C': 'Alflex', '002333': 'Cisco Systems, Inc', '0022C7': 'SEGGER Microcontroller GmbH & Co. KG', '0022C1': 'Active Storage Inc.', '0022C2': 'Proview Eletrônica do Brasil LTDA', '0022BD': 'Cisco Systems, Inc', '0022BA': 'HUTH Elektronik Systeme GmbH', '0022AD': 'TELESIS TECHNOLOGIES, INC.', '0022AE': 'Mattel Inc.', '0022A3': 'California Eastern Laboratories', '0022BB': 'beyerdynamic GmbH & Co. 
KG', '0022B6': 'Superflow Technologies Group', '0022B5': 'NOVITA', '0022B2': '4RF Communications Ltd', '0022AC': 'Hangzhou Siyuan Tech. Co., Ltd', '0022D1': 'Albrecht Jung GmbH & Co. KG', '0022D2': 'All Earth Comércio de Eletrônicos LTDA.', '0022CA': 'Anviz Biometric Tech. Co., Ltd.', '002305': 'Cisco Systems, Inc', '0022FF': 'NIVIS LLC', '002302': 'Cobalt Digital, Inc.', '0022F5': 'Advanced Realtime Tracking GmbH', '0022E5': 'Fisher-Rosemount Systems Inc.', '0022E4': 'APASS TECHNOLOGY CO., LTD.', '0022D5': 'Eaton Corp. Electrical Group Data Center Solutions - Pulizzi', '0022A0': 'Delphi Corporation', '002247': 'DAC ENGINEERING CO., LTD.', '002235': 'Strukton Systems bv', '00222C': 'Ceton Corp', '00222D': 'SMC Networks Inc.', '002259': 'Guangzhou New Postcom Equipment Co.,Ltd.', '002253': 'Entorian Technologies', '00224F': 'Byzoro Networks Ltd.', '002251': 'Lumasense Technologies', '00227C': 'Woori SMT Co.,ltd', '002277': 'NEC Australia Pty Ltd', '002279': 'Nippon Conlux Co., Ltd.', '002271': 'Jäger Computergesteuerte Meßtechnik GmbH.', '00225E': 'Uwin Technologies Co.,LTD', '002258': 'Taiyo Yuden Co., Ltd.', '00225B': 'Teradici Corporation', '00223D': 'JumpGen Systems, LLC', '002239': 'Indiana Life Sciences Incorporated', '00228F': 'CNRS', '002297': 'XMOS Semiconductor', '002292': 'Cinetal', '00227E': 'Chengdu 30Kaitian Communication Industry Co.Ltd', '0021CC': 'Flextronics International', '0021CF': 'The Crypto Group', '0021C5': '3DSP Corp', '0021E6': 'Starlight Video Limited', '0021E0': 'CommAgility Ltd', '0021DE': 'Firepro Wireless', '0021F9': 'WIRECOM Technologies', '0021FA': 'A4SP Technologies Ltd.', '0021F0': 'EW3 Technologies LLC', '002211': 'Rohati Systems', '002212': 'CAI Networks, Inc.', '00220D': 'Cisco Systems, Inc', '002208': 'Certicom Corp', '002205': 'WeLink Solutions, Inc.', '002209': 'Omron Healthcare Co., Ltd', '00221F': 'eSang Technologies Co., Ltd.', '002217': 'Neat Electronics', '0021D3': 'BOCOM SECURITY(ASIA PACIFIC) LIMITED', '0021EE': 'Full Spectrum Inc.', '0021EC': 'Solutronic GmbH', '00222A': 'SoundEar A/S', '00218C': 'TopControl GMBH', '00218A': 'Electronic Design and Manufacturing Company', '00218B': 'Wescon Technology, Inc.', '002184': 'POWERSOFT SRL', '0021B3': 'Ross Controls', '0021B6': 'Triacta Power Technologies Inc.', '0021AE': 'ALCATEL-LUCENT FRANCE - WTD', '0021AF': 'Radio Frequency Systems', '0021A0': 'Cisco Systems, Inc', '002198': 'Thai Radio Co, LTD', '002178': 'Matuschek Messtechnik GmbH', '002173': 'Ion Torrent Systems, Inc.', '002177': 'W. L. 
Gore & Associates', '00217A': 'Sejin Electron, Inc.', '002172': 'Seoultek Valley', '002169': 'Prologix, LLC.', '0021A4': 'Dbii Networks', '002199': 'Vacon Plc', '0021B9': 'Universal Devices Inc.', '00214B': 'Shenzhen HAMP Science & Technology Co.,Ltd', '002145': 'Semptian Technologies Ltd.', '00213E': 'TomTom', '00213F': 'A-Team Technology Ltd.', '002107': 'Seowonintech Co Ltd.', '001FFE': 'HPN Supply Chain', '001FFF': 'Respironics, Inc.', '002121': 'VRmagic GmbH', '002123': 'Aerosat Avionics', '002153': 'SeaMicro Inc.', '002154': 'D-TACQ Solutions Ltd', '00210A': 'byd:sign Corporation', '00211B': 'Cisco Systems, Inc', '00212D': 'SCIMOLEX CORPORATION', '001FD7': 'TELERAD SA', '001FD3': 'RIVA Networks Inc.', '001FD5': 'MICRORISC s.r.o.', '001FFC': 'Riccius+Sohn GmbH', '001FFD': 'Indigo Mobile Technologies Corp.', '001FEE': 'ubisys technologies GmbH', '001FEF': 'SHINSEI INDUSTRIES CO.,LTD', '001FBB': 'Xenatech Co.,LTD', '001FB1': 'Cybertech Inc.', '001FCA': 'Cisco Systems, Inc', '001FEC': 'Synapse Électronique', '001FE8': 'KURUSUGAWA Electronics Industry Inc,.', '001FC3': 'SmartSynch, Inc', '001FC1': 'Hanlong Technology Co.,LTD', '001FE0': 'EdgeVelocity Corp', '001FD8': 'A-TRUST COMPUTER CORPORATION', '001FB2': 'Sontheim Industrie Elektronik GmbH', '001F44': 'GE Transportation Systems', '001F39': 'Construcciones y Auxiliar de Ferrocarriles, S.A.', '001F36': 'Bellwin Information Co. Ltd.,', '001F3D': 'Qbit GmbH', '001F38': 'POSITRON', '001F8A': 'Ellion Digital Inc.', '001F7F': 'Phabrix Limited', '001F7C': 'Witelcom AS', '001F7A': 'WiWide Inc.', '001F77': 'HEOL DESIGN', '001F76': 'AirLogic Systems Inc.', '001F73': 'Teraview Technology Co., Ltd.', '001F62': 'JSC Stilsoft', '001F67': 'Hitachi,Ltd.', '001F61': 'Talent Communication Networks Inc.', '001F52': 'UVT Unternehmensberatung fur Verkehr und Technik GmbH', '001F4F': 'Thinkware Co. Ltd.', '001FAB': 'I.S HIGH TECH.INC', '001FAC': 'Goodmill Systems Ltd', '001F94': 'Lascar Electronics Ltd', '001F8E': 'Metris USA Inc.', '001F89': 'Signalion GmbH', '001F56': 'DIGITAL FORECAST', '001EF3': 'From2', '001EE7': 'Epic Systems Inc', '001EE9': 'Stoneridge Electronics AB', '001EFD': 'Microbit 2.0 AB', '001F03': 'NUM AG', '001EFF': 'Mueller-Elektronik GmbH & Co. KG', '001F2D': 'Electro-Optical Imaging, Inc.', '001F2F': 'Berker GmbH & Co. KG', '001F32': 'Nintendo Co., Ltd.', '001F05': 'iTAS Technology Corp.', '001F07': 'AZTEQ Mobile', '001EF6': 'Cisco Systems, Inc', '001EF9': 'Pascom Kommunikations systeme GmbH.', '001F11': 'OPENMOKO, INC.', '001EE4': 'ACS Solutions France', '001EED': 'Adventiq Ltd.', '001F1C': 'KOBISHI ELECTRIC Co.,Ltd.', '001F19': 'BEN-RI ELECTRONICA S.A.', '001ECC': 'CDVI', '001EC5': 'Middle Atlantic Products Inc', '001EBE': 'Cisco Systems, Inc', '001EC3': 'Kozio, Inc.', '001EBD': 'Cisco Systems, Inc', '001E92': 'JEULIN S.A.', '001E91': 'KIMIN Electronic Co., Ltd.', '001E89': 'CRFS Limited', '001E86': 'MEL Co.,Ltd.', '001E88': 'ANDOR SYSTEM SUPPORT CO., LTD.', '001ED2': 'Ray Shine Video Technology Inc', '001ED4': 'Doble Engineering', '001ECE': 'BISA Technologies (Hong Kong) Limited', '001EC8': 'Rapid Mobile (Pty) Ltd', '001EB9': 'Sing Fai Technology Limited', '001EB2': 'LG innotek', '001EB7': 'TBTech, Co., Ltd.', '001E76': 'Thermo Fisher Scientific', '001E72': 'PCS', '001E95': 'SIGMALINK', '001E93': 'CiriTech Systems Inc', '001E9E': 'ddm hopt + schuler Gmbh + Co. 
KG', '001E9D': 'Recall Technologies, Inc.', '001EA2': 'Symx Systems, Inc.', '001EA9': 'Nintendo Co., Ltd.', '001E79': 'Cisco Systems, Inc', '001E09': 'ZEFATEK Co.,LTD', '001E06': 'WIBRAIN', '001E0C': 'Sherwood Information Partners, Inc.', '001E02': 'Sougou Keikaku Kougyou Co.,Ltd.', '001E01': 'Renesas Technology Sales Co., Ltd.', '001DFF': 'Network Critical Solutions Ltd', '001E3C': 'Lyngbox Media AB', '001E5B': 'Unitron Company, Inc.', '001E5E': 'COmputime Ltd.', '001E54': 'TOYO ELECTRIC Corporation', '001E4D': 'Welkin Sciences, LLC', '001E4B': 'City Theatrical', '001E4A': 'Cisco Systems, Inc', '001E31': 'INFOMARK CO.,LTD.', '001E2C': 'CyVerse Corporation', '001E26': 'Digifriends Co. Ltd', '001E69': 'Thomson Inc.', '001E5F': 'KwikByte, LLC', '001E13': 'Cisco Systems, Inc', '001E0D': 'Micran Ltd.', '001E23': 'Electronic Educational Devices, Inc', '001DD7': 'Algolith', '001DC9': 'GainSpan Corp.', '001DC7': 'L-3 Communications Geneva Aerospace', '001DEE': 'NEXTVISION SISTEMAS DIGITAIS DE TELEVISÃO LTDA.', '001DEA': 'Commtest Instruments Ltd', '001E00': 'Shantou Institute of Ultrasonic Instruments', '001DF3': 'SBS Science & Technology Co., Ltd', '001DDD': 'DAT H.K. LIMITED', '001DE4': 'Visioneered Image Systems', '001DE2': 'Radionor Communications', '001DBC': 'Nintendo Co., Ltd.', '001DB6': 'BestComm Networks, Inc.', '001DAC': 'Gigamon Systems LLC', '001DA2': 'Cisco Systems, Inc', '001D99': 'Cyan Optic, Inc.', '001D9B': 'Hokuyo Automatic Co., Ltd.', '001D86': 'Shinwa Industries(China) Ltd.', '001D88': 'Clearwire', '001D81': 'GUANGZHOU GATEWAY ELECTRONICS CO., LTD', '001D7D': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '001D66': 'Hyundai Telecom', '001D77': 'NSGate', '001D76': 'Eyeheight Ltd.', '001D7A': 'Wideband Semiconductor, Inc.', '001D68': 'Thomson Telecom Belgium', '001D4E': 'TCM Mobile LLC', '001D49': 'Innovation Wireless Inc.', '001D46': 'Cisco Systems, Inc', '001D48': 'Sensor-Technik Wiedemann GmbH', '001D47': 'Covote GmbH & Co KG', '001D41': 'Hardy Instruments', '001D3D': 'Avidyne Corporation', '001D3C': 'Muscle Corporation', '001D3A': 'mh acoustics LLC', '001D35': 'Viconics Electronics Inc.', '001D31': 'HIGHPRO INTERNATIONAL R&D CO,.LTD.', '001D89': 'VaultStor Corporation', '001D61': 'BIJ Corporation', '001D5C': 'Tom Communication Industrial Co.,Ltd.', '001D56': 'Kramer Electronics Ltd.', '001CE1': 'INDRA SISTEMAS, S.A.', '001CE0': 'DASAN TPS', '001D15': 'Shenzhen Dolphin Electronic Co., Ltd', '001D16': 'SFR', '001D11': 'Analogue & Micro Ltd', '001D12': 'ROHM CO., LTD.', '001CF1': 'SUPoX Technology Co. , LTD.', '001CF3': 'EVS BROADCAST EQUIPMENT', '001CF4': 'Media Technology Systems Inc', '001CE6': 'INNES', '001CD9': 'GlobalTop Technology Inc.', '001CDA': 'Exegin Technologies Limited', '001CD2': 'King Champion (Hong Kong) Limited', '001CCE': 'By Techdesign', '001D26': 'Rockridgesound Technology Co.', '001CF9': 'Cisco Systems, Inc', '001D03': 'Design Solutions Inc.', '001D21': 'Alcad SL', '001D1C': 'Gennet s.a.', '001C90': 'Empacket Corporation', '001C8E': 'Alcatel-Lucent IPD', '001C8F': 'Advanced Electronic Design, Inc.', '001C88': 'TRANSYSTEM INC.', '001C86': 'Cranite Systems, Inc.', '001C7F': 'Check Point Software Technologies', '001C9B': 'FEIG ELECTRONIC GmbH', '001C95': 'Opticomm Corporation', '001C97': 'Enzytek Technology Inc.,', '001CA6': 'Win4NET', '001CA9': 'Audiomatica Srl', '001CA1': 'AKAMAI TECHNOLOGIES, INC.', '001C99': 'Shunra Software Ltd.', '001C6B': 'COVAX Co. 
Ltd', '001C69': 'Packet Vision Ltd', '001C65': 'JoeScan, Inc.', '001C78': 'WYPLAY SAS', '001C70': 'NOVACOMM LTDA', '001C6E': 'Newbury Networks, Inc.', '001C5F': 'Winland Electronics, Inc.', '001C58': 'Cisco Systems, Inc', '001CB9': 'KWANG SUNG ELECTRONICS CO., LTD.', '001CA3': 'Terra', '001C1E': 'emtrion GmbH', '001C18': 'Sicert S.r.L.', '001C1A': 'Thomas Instrumentation, Inc', '001C0E': 'Cisco Systems, Inc', '001C13': 'OPTSYS TECHNOLOGY CO., LTD.', '001C5A': 'Advanced Relay Corporation', '001C4F': 'MACAB AB', '001C4E': 'TASA International Limited', '001C4B': 'Gener8, Inc.', '001BF4': 'KENWIN INDUSTRIAL(HK) LTD.', '001BF9': 'Intellitect Water Ltd', '001BFA': 'G.i.N. mbH', '001C3A': 'Element Labs, Inc.', '001C3D': 'WaveStorm', '001BF3': 'TRANSRADIO SenderSysteme Berlin AG', '001C21': 'Nucsafe Inc.', '001C0B': 'SmartAnt Telecom', '001C08': 'Echo360, Inc.', '001C2D': 'FlexRadio Systems', '001C2C': 'Synapse', '001C40': 'VDG-Security bv', '001BD8': 'FLIR Systems Inc', '001BD4': 'Cisco Systems, Inc', '001BD0': 'IDENTEC SOLUTIONS', '001BCD': 'DAVISCOMMS (S) PTE LTD', '001BCA': 'Beijing Run Technology LTD. Company', '001BAB': 'Telchemy, Incorporated', '001BAE': 'Micro Control Systems, Inc', '001BA8': 'UBI&MOBI,.Inc', '001BCC': 'KINGTEK CCTV ALLIANCE CO., LTD.', '001BC8': 'MIURA CO.,LTD', '001BC1': 'HOLUX Technology, Inc.', '001BDB': 'Valeo VECS', '001B88': 'Divinet Access Technologies Ltd', '001BDE': 'Renkus-Heinz, Inc.', '001BBC': 'Silver Peak Systems, Inc.', '001B97': 'Violin Technologies', '001BA0': 'Awox', '001B35': 'ChongQing JINOU Science & Technology Development CO.,Ltd', '001B36': 'Tsubata Engineering Co.,Ltd. (Head Office)', '001B39': 'Proxicast', '001B3B': 'Yi-Qing CO., LTD', '001B79': 'FAIVELEY TRANSPORT', '001B73': 'DTL Broadcast Ltd', '001B71': 'Telular Corp.', '001B4A': 'W&W Communications, Inc.', '001B44': 'SanDisk Corporation', '001B46': 'Blueone Technology Co.,Ltd', '001B40': 'Network Automation mxc AB', '001B42': 'Wise & Blue', '001B51': 'Vector Technology Corp.', '001B54': 'Cisco Systems, Inc', '001B64': 'IsaacLandKorea Co., Ltd,', '001B20': 'TPine Technology', '001B22': 'Palit Microsystems ( H.K.) 
Ltd.', '001B1C': 'Coherent', '001B83': 'Finsoft Ltd', '001B81': 'DATAQ Instruments, Inc.', '001B7D': 'CXR Anderson Jacobson', '001B26': 'RON-Telecom ZAO', '001B65': 'China Gridcom Co., Ltd', '001B19': 'IEEE I&M Society TC9', '001B13': 'Icron Technologies Corporation', '001AFA': 'Welch Allyn, Inc.', '001AF7': 'dataschalt e+a GmbH', '001AF3': 'Samyoung Electronics', '001AEF': 'Loopcomm Technology, Inc.', '001AEC': 'Keumbee Electronics Co.,Ltd.', '001AB9': 'PMC', '001ABA': 'Caton Overseas Limited', '001AB3': 'VISIONITE INC.', '001ACA': 'Tilera Corporation', '001B0F': 'Petratec', '001AD1': 'FARGO CO., LTD.', '001A99': 'Smarty (HZ) Information Electronics Co., Ltd', '001AA6': 'Telefunken Radio Communication Systems GmbH &CO.KG', '001A96': 'ECLER S.A.', '001AA9': 'FUJIAN STAR-NET COMMUNICATION CO.,LTD', '001AA8': 'Mamiya Digital Imaging Co., Ltd.', '001A91': 'FusionDynamic Ltd.', '001A8C': 'Sophos Ltd', '001A6D': 'Cisco Systems, Inc', '001A6E': 'Impro Technologies', '001A6C': 'Cisco Systems, Inc', '001A7B': 'Teleco, Inc.', '001A84': 'V One Multimedia Pte Ltd', '001A59': 'Ircona', '001A46': 'Digital Multimedia Technology Co., Ltd', '001A63': 'Elster Solutions, LLC,', '0019FE': 'SHENZHEN SEECOMM TECHNOLOGY CO.,LTD.', '0019FD': 'Nintendo Co., Ltd.', '001A0D': 'HandHeld entertainment, Inc.', '001A0E': 'Cheng Uei Precision Industry Co.,Ltd', '001A3E': 'Faster Technology LLC', '001A3A': 'Dongahelecomm', '001A3B': 'Doah Elecom Inc.', '001A3C': 'Technowave Ltd.', '001A40': 'A-FOUR TECH CO., LTD.', '001A01': 'Smiths Medical', '001A30': 'Cisco Systems, Inc', '001A36': 'Aipermon GmbH & Co. KG', '001A26': 'Deltanode Solutions AB', '001A25': 'DELTA DORE', '0019ED': 'Axesstel Inc.', '0019F6': 'Acconet (PTE) Ltd', '001A4A': 'Qumranet Inc.', '001A15': 'gemalto e-Payment', '001978': 'Datum Systems, Inc.', '00198A': 'Northrop Grumman Systems Corp.', '001989': 'Sonitrol Corporation', '00198E': 'Oticon A/S', '001980': 'Gridpoint Systems', '001983': 'CCT R&D Limited', '0019CD': 'Chengdu ethercom information technology Ltd.', '0019D9': 'Zeutschel GmbH', '0019CA': 'Broadata Communications, Inc', '0019D3': 'TRAK Microwave', '0019C3': 'Qualitrol', '0019BE': 'Altai Technologies Limited', '0019B4': 'Intellio Ltd', '00197F': 'PLANTRONICS, INC.', '00197A': 'MAZeT GmbH', '0019BA': 'Paradox Security Systems Ltd', '0019A1': 'LG INFORMATION & COMM.', '0019A8': 'WiQuest Communications', '001939': 'Gigamips', '00193A': 'OESOLUTIONS', '001924': 'LBNL Engineering', '00191A': 'IRLINK', '001916': 'PayTec AG', '00194C': 'Fujian Stelcom information & Technology CO.,Ltd', '00194A': 'TESTO AG', '001968': 'Digital Video Networks(Shanghai) CO. LTD.', '00196D': 'Raybit Systems Korea, Inc', '001970': 'Z-Com, Inc.', '00190E': 'Atech Technology Co., Ltd.', '001922': 'CM Comandos Lineares', '00191D': 'Nintendo Co., Ltd.', '001960': 'DoCoMo Systems, Inc.', '00195A': 'Jenaer Antriebstechnik GmbH', '00192F': 'Cisco Systems, Inc', '001950': 'Harman Multimedia', '0018C6': 'OPW Fuel Management Systems', '0018C3': 'CS Corporation', '0018CA': 'Viprinet GmbH', '0018C7': 'Real Time Automation', '0018BB': 'Eliwell Controls srl', '0018FB': 'Compro Technology', '0018EE': 'Videology Imaging Solutions, Inc.', '0018EB': 'Blue Zen Enterprises Private Limited', '0018E2': 'Topdata Sistemas de Automacao Ltda', '0018D1': 'Siemens Home & Office Comm. 
Devices', '0018D0': 'AtRoad, A Trimble Company', '0018D2': 'High-Gain Antennas LLC', '0018D3': 'TEAMCAST', '0018BF': 'Essence Technology Solution, Inc.', '0018B9': 'Cisco Systems, Inc', '0018B8': 'New Voice International AG', '001905': 'SCHRACK Seconet AG', '001907': 'Cisco Systems, Inc', '0018AA': 'Protec Fire Detection plc', '0018DC': 'Prostar Co., Ltd.', '001870': 'E28 Shanghai Limited', '001872': 'Expertise Engineering', '001874': 'Cisco Systems, Inc', '001869': 'KINGJIM', '001861': 'Ooma, Inc.', '00185B': 'Network Chemistry, Inc', '001890': 'RadioCOM, s.r.o.', '00188C': 'Mobile Action Technology Inc.', '00188F': 'Montgomery Technology, Inc.', '001884': 'Fon Technology S.L.', '001855': 'Aeromaritime Systembau GmbH', '001851': 'SWsoft', '001856': 'EyeFi, Inc', '00184E': 'Lianhe Technologies, Inc.', '00183B': 'CENITS Co., Ltd.', '00183C': 'Encore Software Limited', '001841': 'High Tech Computer Corp', '0018A1': 'Tiqit Computers, Inc.', '001896': 'Great Well Electronic LTD', '00184C': 'Bogen Communications', '001880': 'Maxim Integrated Products', '00187C': 'INTERCROSS, LLC', '001812': 'Beijing Xinwei Telecom Technology Co., Ltd.', '00180B': 'Brilliant Telecommunications', '00180C': 'Optelian Access Networks', '001810': 'IPTrade S.A.', '001809': 'CRESYN', '001804': 'E-TEK DIGITAL TECHNOLOGY LIMITED', '001800': 'UNIGRAND LTD', '0017FD': 'Amulet Hotkey', '0017FC': 'Suprema Inc.', '0017FB': 'FA', '0017DA': 'Spans Logic', '0017D8': 'Magnum Semiconductor, Inc.', '001826': 'Cale Access AB', '00182D': 'Artec Design', '00182A': 'Taiwan Video & Monitor', '001819': 'Cisco Systems, Inc', '0017CF': 'iMCA-GmbH', '0017B9': 'Gambro Lundia AB', '00175C': 'SHARP CORPORATION', '001759': 'Cisco Systems, Inc', '001754': 'Arkino HiTOP Corporation Limited', '001790': 'HYUNDAI DIGITECH Co, Ltd.', '001791': 'LinTech GmbH', '001795': 'Cisco Systems, Inc', '001799': 'SmarTire Systems Inc.', '001778': 'Central Music Co.', '00176B': 'Kiyon, Inc.', '00176E': 'DUCATI SISTEMI', '001774': 'Elesta GmbH', '00175F': 'XENOLINK Communications Co., Ltd.', '001777': 'Obsidian Research Corporation', '00176A': 'Avago Technologies', '00177F': 'Worldsmart Retech', '001786': 'wisembed', '001782': 'LoBenn Inc.', '0017B3': 'Aftek Infosys Limited', '0017A2': 'Camrivox Ltd.', '00179D': 'Kelman Limited', '00171E': 'Theo Benning GmbH & Co. KG', '001712': 'ISCO International', '00170C': 'Twig Com Ltd.', '00170D': 'Dust Networks Inc.', '00170B': 'Contela, Inc.', '00170F': 'Cisco Systems, Inc', '001704': 'Shinco Electronics Group Co.,Ltd', '001707': 'InGrid, Inc', '001702': 'Osung Midicom Co., Ltd', '001752': 'DAGS, Inc', '001756': 'Vinci Labs Oy', '001738': 'International Business Machines', '00172D': 'Axcen Photonics Corporation', '001724': 'Studer Professional Audio GmbH', '001741': 'DEFIDEV', '0016A1': '3Leaf Networks', '0016A4': 'Ezurio Ltd', '001699': 'Tonic DVB Marketing Ltd', '00169B': 'Alstom Transport', '001690': 'J-TEK INCORPORATION', '001693': 'PowerLink Technology Inc.', '001698': 'T&A Mobile Phones', '001696': 'QDI Technology (H.K.) 
Limited', '001680': 'Bally Gaming + Systems', '001681': 'Vector Informatik GmbH', '00BAC0': 'Biometric Access Company', '0016AB': 'Dansensor A/S', '0016A5': 'Tandberg Storage ASA', '0016D8': 'Senea AB', '0016D6': 'TDA Tech Pty Ltd', '0016D5': 'Synccom Co., Ltd', '0016C9': 'NAT Seattle, Inc.', '0016C6': 'North Atlantic Industries', '0016D2': 'Caspian', '0016BE': 'INFRANET, Inc.', '0016F7': 'L-3 Communications, Aviation Recorders', '001685': 'Elisa Oyj', '00165E': 'Precision I/O', '001658': 'Fusiontech Technologies Inc.', '001653': 'LEGO System A/S IE Electronics Division', '001652': 'Hoatech Technologies, Inc.', '001666': 'Quantier Communication Inc.', '001662': 'Liyuh Technology Ltd.', '001661': 'Novatium Solutions (P) Ltd', '001664': 'Prod-El SpA', '001650': ' Kratos EPD ', '00162F': 'Geutebrück GmbH', '00162B': 'Togami Electric Mfg.co.,Ltd.', '001624': 'Teneros, Inc.', '001642': 'Pangolin', '00163B': 'VertexRSI/General Dynamics', '001637': 'CITEL SpA', '001630': 'Vativ Technologies', '001608': 'Sequans Communications', '00161C': 'e:cue', '0015B8': 'Tahoe', '0015B6': 'ShinMaywa Industries, Ltd.', '0015B0': 'AUTOTELENET CO.,LTD', '0015B1': 'Ambient Corporation', '00159F': 'Terascala, Inc.', '00159E': 'Mad Catz Interactive Inc', '0015A1': 'ECA-SINTERS', '0015CB': 'Surf Communication Solutions Ltd.', '0015CC': 'UQUEST, LTD.', '0015C6': 'Cisco Systems, Inc', '0015F7': 'Wintecronics Ltd.', '0015F8': 'Kingtronics Industrial Co. Ltd.', '0015A7': 'Robatech AG', '0015DB': 'Canesta Inc.', '0015D7': 'Reti Corporation', '0015FA': 'Cisco Systems, Inc', '0015FC': 'Littelfuse Startco', '0015E6': 'MOBILE TECHNIKA Inc.', '0015C3': 'Ruf Telematik AG', '00156C': 'SANE SYSTEM CO., LTD', '00156A': 'DG2L Technologies Pvt. Ltd.', '00156F': 'Xiranet Communications GmbH', '001572': 'Red-Lemon', '001546': 'ITG Worldwide Sdn Bhd', '00153E': 'Q-Matic Sweden AB', '001542': 'MICROHARD S.R.L.', '001581': 'MAKUS Inc.', '00157A': 'Telefin S.p.A.', '001573': 'NewSoft Technology Corporation', '001575': 'Nevis Networks Inc.', '001551': 'RadioPulse Inc.', '001552': 'Wi-Gear Inc.', '00154E': 'IEC', '001550': 'Nits Technology Inc', '001567': 'RADWIN Inc.', '00155D': 'Microsoft Corporation', '001562': 'Cisco Systems, Inc', '001584': 'Schenck Process GmbH', '001593': 'U4EA Technologies Inc.', '00158D': 'Jennic Ltd', '00155B': 'Sampo Corporation', '001553': 'Cytyc Corporation', '0014ED': 'Airak, Inc.', '0014E1': 'Data Display AG', '0014E3': 'mm-lab GmbH', '0014D9': 'IP Fabrics, Inc.', '001523': 'Meteor Communications Corporation', '001524': 'Numatics, Inc.', '00151B': 'Isilon Systems Inc.', '0014F9': 'Vantage Controls', '0014F0': 'Business Security OL AB', '0014F2': 'Cisco Systems, Inc', '0014E7': 'Stolinx,. Inc', '0014E9': 'Nortech International', '001503': 'PROFIcomms s.r.o.', '001509': 'Plus Technology Co., Ltd', '001529': 'N3 Corporation', '00152D': 'TenX Networks, LLC', '001510': 'Techsphere Co., Ltd', '001513': 'EFS sas', '001490': 'ASP Corporation', '001488': 'Akorri', '001479': 'NEC Magnus Communications,Ltd.', '00147B': 'Iteris, Inc.', '00147A': 'Eubus GmbH', '0014D6': 'Jeongmin Electronics Co.,Ltd.', '0014CE': 'NF CORPORATION', '0014D0': 'BTI Systems Inc.', '0014BB': 'Open Interface North America', '0014A8': 'Cisco Systems, Inc', '0014A0': 'Accsense, Inc.', '00148B': 'Globo Electronic GmbH & Co. 
KG', '001484': 'Cermate Technologies Inc.', '0014AC': 'Bountiful WiFi', '001448': 'Inventec Multimedia & Telecom Corporation', '00144B': 'Hifn, Inc.', '001441': 'Innovation Sound Technology Co., LTD.', '00143C': 'Rheinmetall Canada Inc.', '00141A': 'DEICY CORPORATION', '00141C': 'Cisco Systems, Inc', '001459': 'Moram Co., Ltd.', '001457': 'T-VIPS AS', '001467': 'ArrowSpan Inc.', '00145F': 'ADITEC CO. LTD', '00146D': 'RF Technologies', '001470': 'Prokom Software SA', '001453': 'ADVANTECH TECHNOLOGIES CO.,LTD', '001454': 'Symwave', '001435': 'CityCom Corp.', '001426': 'NL Technology', '001416': 'Scosche Industries, Inc.', '0013FE': 'GRANDTEC ELECTRONIC CORP.', '0013F8': 'Dex Security Solutions', '0013F9': 'Cavera Systems', '0013F2': 'Klas Ltd', '0013DF': 'Ryvor Corp.', '0013D5': 'RuggedCom', '0013D6': 'TII NETWORK TECHNOLOGIES, INC.', '0013DB': 'SHOEI Electric Co.,Ltd', '0013C5': 'LIGHTRON FIBER-OPTIC DEVICES INC.', '0013C4': 'Cisco Systems, Inc', '0013C2': 'WACOM Co.,Ltd', '0013CD': 'MTI co. LTD', '0013D3': 'MICRO-STAR INTERNATIONAL CO., LTD.', '0013CA': 'Pico Digital', '0013BF': 'Media System Planning Corp.', '0013BB': 'Smartvue Corporation', '0013B5': 'Wavesat', '0013F7': 'SMC Networks, Inc.', '0013ED': 'PSIA', '0013AF': 'NUMA Technology,Inc.', '0013B0': 'Jablotron', '0013B1': 'Intelligent Control Systems (Asia) Pte Ltd', '0013E6': 'Technolution', '00140C': 'GKB CCTV CO., LTD.', '00133C': 'QUINTRON SYSTEMS INC.', '00133D': 'Micro Memory Curtiss Wright Co', '00133F': 'Eppendorf Instrumente GmbH', '001341': 'Shandong New Beiyang Information Technology Co.,Ltd', '001350': 'Silver Spring Networks, Inc', '00134C': 'YDT Technology International', '00139D': 'MaxLinear Hispania S.L.U.', '00138D': 'Kinghold', '00137F': 'Cisco Systems, Inc', '001382': 'Cetacea Networks Corporation', '00139A': 'K-ubique ID Corp.', '00139E': 'Ciara Technologies Inc.', '001390': 'Termtek Computer Co., Ltd', '001359': 'ProTelevision Technologies A/S', '001375': 'American Security Products Co.', '001358': 'Realm Systems, Inc.', '0012ED': 'AVG Advanced Technologies', '0012EA': 'Trane', '0012E7': 'Projectek Networking Electronics Corp.', '001331': 'CellPoint Connect', '001335': 'VS Industry Berhad', '00132F': 'Interactek', '001330': 'EURO PROTECTION SURVEILLANCE', '001325': 'Cortina Systems Inc', '0012C3': 'WIT S.A.', '0012FA': 'THX LTD', '001306': 'Always On Wireless', '0012FD': 'OPTIMUS IC S.A.', '001305': 'Epicom, Inc.', '0012E4': 'ZIEHL industrie-electronik GmbH + Co KG', '0012C1': 'Check Point Software Technologies', '0012BB': 'Telecommunications Industry Association TR-41 Committee', '0012B6': 'Santa Barbara Infrared, Inc.', '0012B9': 'Fusion Digital Technology', '0012B4': 'Work Microwave GmbH', '001297': 'O2Micro, Inc.', '00129D': 'First International Computer do Brasil', '00129C': 'Yulinet', '001290': 'KYOWA Electric & Machinery Corp.', '001291': 'KWS Computersysteme GmbH', '001295': 'Aiware Inc.', '00128B': 'Sensory Networks Inc', '00128F': 'Montilio', '0012A3': 'Trust International B.V.', '0012A7': 'ISR TECHNOLOGIES Inc', '0012AA': 'IEE, Inc.', '00129F': 'RAE Systems', '0012B5': 'Vialta, Inc.', '0012B1': 'Dai Nippon Printing Co., Ltd', '001289': 'Advance Sterilization Products', '001284': 'Lab33 Srl', '001281': 'March Networks S.p.A.', '0012C8': 'Perfect tech', '0012C6': 'TGC America, Inc', '0012CC': 'Bitatek CO., LTD', '00127E': 'Digital Lifestyles Group, Inc.', '001233': 'JRC TOKKI Co.,Ltd.', '00125B': 'KAIMEI ELECTRONI', '001259': 'THERMO ELECTRON KARLSRUHE', '00125A': 'Microsoft Corporation', '00123D': 'GES 
Co, Ltd', '001239': 'S Net Systems Inc.', '00122F': 'Sanei Electric Inc.', '001230': 'Picaso Infocommunication CO., LTD.', '001246': 'T.O.M TECHNOLOGY INC..', '001256': 'LG INFORMATION & COMM.', '00121D': 'Netfabric Corporation', '00126B': 'Ascalade Communications Limited', '0011C5': 'TEN Technology', '0011BE': 'AGP Telecom Co. Ltd', '0011BA': 'Elexol Pty Ltd', '0011BC': 'Cisco Systems, Inc', '0011B3': 'YOSHIMIYA CO.,LTD.', '0011E2': 'Hua Jung Components Co., Ltd.', '0011DA': 'Vivaas Technology Inc.', '0011DD': 'FROMUS TEC. Co., Ltd.', '0011EB': 'Innovative Integration', '0011EA': 'IWICS Inc.', '0011E4': 'Danelec Electronics A/S', '0011E1': 'Arcelik A.S', '0011FB': 'Heidelberg Engineering GmbH', '0011D6': 'HandEra, Inc.', '0011CA': 'Long Range Systems, Inc.', '0011CD': 'Axsun Technologies', '001214': 'Koenig & Bauer AG', '00120F': 'IEEE 802.3', '0011EF': 'Conitec Datensysteme GmbH', '00120C': 'CE-Infosys Pte Ltd', '001168': 'HomeLogic LLC', '001163': 'SYSTEM SPA DEPT. ELECTRONICS', '00115F': 'ITX Security Co., Ltd.', '0011A8': 'Quest Technologies', '0011A1': 'VISION NETWARE CO.,LTD', '0011A0': 'Vtech Engineering Canada Ltd', '00118F': 'EUTECH INSTRUMENTS PTE. LTD.', '00118D': 'Hanchang System Corp.', '001191': 'CTS-Clima Temperatur Systeme GmbH', '0011B6': 'Open Systems International', '0011B0': 'Fortelink Inc.', '0011AC': 'Simtec Electronics', '0011AD': 'Shanghai Ruijie Technology', '001182': 'IMI Norgren Ltd', '00117A': 'Singim International Corp.', '001153': 'Trident Tek, Inc.', '00114F': 'US Digital Television, Inc', '001172': 'COTRON CORPORATION', '001189': 'Aerotech Inc', '00119B': 'Telesynergy Research Inc.', '001148': 'Prolon Control Systems', '001140': 'Nanometrics Inc.', '001144': 'Assurance Technology Corp', '001101': 'CET Technologies Pte Ltd', '000FFF': 'Control4', '000FFC': 'Merit Li-Lin Ent.', '001128': 'Streamit', '00113F': 'Alcatel DI', '001137': 'AICHI ELECTRIC CO., LTD.', '001113': 'Fraunhofer FOKUS', '001112': 'Honeywell CMSS', '00113A': 'SHINBORAM', '000FFB': 'Nippon Denso Industry Co., Ltd.', '000FF2': 'Loud Technologies Inc.', '000FF1': 'nex-G Systems Pte.Ltd', '000FA4': 'Sprecher Automation GmbH', '000FA6': 'S2 Security Corporation', '000FAA': 'Nexus Technologies', '000FA8': 'Photometrics, Inc.', '000FBD': 'MRV Communications (Networks) LTD', '000FB4': 'Timespace Technology', '000F9D': 'DisplayLink (UK) Ltd', '000FE0': 'NComputing Co.,Ltd.', '000FE3': 'Damm Cellular Systems A/S', '000FCB': '3Com Ltd', '000FC5': 'KeyMed Ltd', '000FF3': 'Jung Myoung Communications&Technology', '000FBF': 'DGT Sp. 
z o.o.', '000FD5': 'Schwechat - RISE', '000F6E': 'BBox', '000F6F': 'FTA Communication Technologies', '000F63': 'Obzerv Technologies', '000F65': 'icube Corp.', '000F92': 'Microhard Systems Inc.', '000F89': 'Winnertec System Co., Ltd.', '000F71': 'Sanmei Electronics Co.,Ltd', '000F6B': 'GateWare Communications GmbH', '000F67': 'West Instruments', '000F5D': 'Genexis BV', '000F58': 'Adder Technology Limited', '000F41': 'Zipher Ltd', '000F4D': 'TalkSwitch', '000F46': 'SINAR AG', '000ECA': 'WTSS Inc', '000ECC': 'Tableau, LLC', '000ECB': 'VineSys Technology', '000ED2': 'Filtronic plc', '000EC8': 'Zoran Corporation', '000ED9': 'Aksys, Ltd.', '000EC2': 'Lowrance Electronics, Inc.', '000EFB': 'Macey Enterprises', '000EFA': 'Optoway Technology Incorporation', '000EFD': 'FUJINON CORPORATION', '000F12': 'Panasonic Europe Ltd.', '000F0E': 'WaveSplitter Technologies, Inc.', '000F0C': 'SYNCHRONIC ENGINEERING', '000F0B': 'Kentima Technologies AB', '000F21': 'Scientific Atlanta, Inc', '000F11': 'Prodrive B.V.', '000F13': 'Nisca corporation', '000EDB': 'XiNCOM Corp.', '000EDD': 'SHURE INCORPORATED', '000EDC': 'Tellion INC.', '000F14': 'Mindray Co., Ltd.', '000EF5': 'iPAC Technology Co., Ltd.', '000F09': 'Private', '000ECD': 'SKOV A/S', '000ED5': 'COPAN Systems Inc.', '000EB6': 'Riverbed Technology, Inc.', '000EB7': 'Knovative, Inc.', '000EB4': 'GUANGZHOU GAOKE COMMUNICATIONS TECHNOLOGY CO.LTD.', '000EB1': 'Newcotech,Ltd', '000E9E': 'Topfield Co., Ltd', '000EA3': 'CNCR-IT CO.,LTD,HangZhou P.R.CHINA', '000EA2': 'McAfee, Inc', '000E9B': 'Ambit Microsystems Corporation', '000E93': 'Milénio 3 Sistemas Electrónicos, Lda.', '000EA9': 'Shanghai Xun Shi Communications Equipment Ltd. Co.', '000E76': 'GEMSOC INNOVISION INC.', '000E7D': 'Electronics Line 3000 Ltd.', '000E78': 'Amtelco', '000E71': 'Gemstar Technology Development Ltd.', '000E70': 'in2 Networks', '000E63': 'Lemke Diagnostics GmbH', '000E5B': 'ParkerVision - Direct2Data', '000E8D': 'Systems in Progress Holding GmbH', '000E23': 'Incipient, Inc.', '000E25': 'Hannae Technology Co., Ltd', '000E20': 'ACCESS Systems Americas, Inc.', '000E21': 'MTU Friedrichshafen GmbH', '000E37': 'Harms & Wende GmbH & Co.KG', '000E31': 'Olympus Soft Imaging Solutions GmbH', '000E2F': 'Roche Diagnostics GmbH', '000E2A': 'Private', '000E2C': 'Netcodec co.', '000DEA': 'Kingtel Telecommunication Corp.', '000DED': 'Cisco Systems, Inc', '000DE4': 'DIGINICS, Inc.', '000E4E': 'Waveplus Technology Co., Ltd.', '000E4A': 'Changchun Huayu WEBPAD Co.,LTD', '000E05': 'WIRELESS MATRIX CORP.', '000E22': 'Private', '000E1C': 'Hach Company', '000E09': 'Shenzhen Coship Software Co.,LTD.', '000E41': 'NIHON MECHATRONICS CO.,LTD.', '000E3C': 'Transact Technologies Inc', '000E60': '360SUN Digital Broadband Corporation', '000E54': 'AlphaCell Wireless Ltd.', '000E02': 'Advantech AMT Inc.', '000DBE': 'Bel Fuse Europe Ltd.,UK', '000DBC': 'Cisco Systems, Inc', '000DD4': 'Symantec Corporation', '000DD2': 'Simrad Optronics ASA', '000DD1': 'Stryker Corporation', '000DD7': 'Bright', '000DD9': 'Anton Paar GmbH', '000D99': 'Orbital Sciences Corp.; Launch Systems Group', '000D8F': 'King Tsushin Kogyo Co., LTD.', '000DC9': 'THALES Elektronik Systeme GmbH', '000DC5': 'EchoStar Global B.V. ', '000D9F': 'RF Micro Devices', '000DA5': 'Fabric7 Systems, Inc', '000DDC': 'VAC', '000DE0': 'ICPDAS Co.,LTD', '000DE3': 'AT Sweden AB', '000DC8': 'AirMagnet, Inc', '000D81': 'Pepperl+Fuchs GmbH', '000D7A': 'DiGATTO Asia Pacific Pte Ltd', '000D77': 'FalconStor Software', '000D76': 'Hokuto Denshi Co,. 
Ltd.', '000D7B': 'Consensys Computers Inc.', '000D59': 'Amity Systems, Inc.', '000D4E': 'NDR Co.,LTD.', '000D50': 'Galazar Networks', '000D6F': 'Ember Corporation', '000D5E': 'NEC Personal Products', '000D49': 'Triton Systems of Delaware, Inc.', '000D48': 'AEWIN Technologies Co., Ltd.', '000D89': 'Bils Technology Inc', '000D86': 'Huber + Suhner AG', '000D3A': 'Microsoft Corp.', '000D30': 'IceFyre Semiconductor', '000D31': 'Compellent Technologies, Inc.', '000D6C': 'M-Audio', '000D70': 'Datamax Corporation', '000D5B': 'Smart Empire Investments Limited', '000D3F': 'VTI Instruments Corporation', '000CE3': 'Option International N.V.', '000CE7': 'MediaTek Inc.', '000CE8': 'GuangZhou AnJuBao Co., Ltd', '000CE4': 'NeuroCom International, Inc.', '000CD0': 'Symetrix', '000D17': 'Turbo Networks Co.Ltd', '000D18': 'Mega-Trend Electronics CO., LTD.', '000D20': 'ASAHIKASEI TECHNOSYSTEM CO.,LTD.', '000D0E': 'Inqnet Systems, Inc.', '000CC8': 'Xytronix Research & Design, Inc.', '000CCA': 'HGST a Western Digital Company', '000CB5': 'Premier Technolgies, Inc', '000D04': 'Foxboro Eckardt Development GmbH', '000D08': 'AboveCable, Inc.', '000D05': 'cybernet manufacturing inc.', '000CFE': 'Grand Electronic Co., Ltd', '000D2C': 'Net2Edge Limited', '000D25': 'SANDEN CORPORATION', '000D24': 'SENTEC E&E CO., LTD.', '000D11': 'DENTSPLY - Gendex', '000CD9': 'Itcare Co., Ltd', '000CD5': 'Passave Inc.', '000CD2': 'Schaffner EMV AG', '000D22': 'Unitronics LTD', '000D14': 'Vtech Innovation LP dba Advanced American Telephones', '000CB1': 'Salland Engineering (Europe) BV', '000CBC': 'Iscutum', '000C73': 'TELSON ELECTRONICS CO., LTD', '000CB6': 'NANJING SEU MOBILE & INTERNET TECHNOLOGY CO.,LTD', '000CC3': 'BeWAN systems', '000CB4': 'AutoCell Laboratories, Inc.', '000A07': 'WebWayOne Ltd', '000CB0': 'Star Semiconductor Corporation', '000C34': 'Vixen Co., Ltd.', '000C7E': 'Tellium Incorporated', '000C87': 'AMD', '000C98': 'LETEK Communications Inc.', '000C8E': 'Mentor Engineering Inc', '000CA2': 'Harmonic Video Network', '000CA4': 'Prompttec Product Management GmbH', '000C83': 'Logical Solutions', '000C96': 'OQO, Inc.', '000C1A': 'Quest Technical Solutions Inc.', '000C24': 'ANATOR', '000C19': 'Telio Communications GmbH', '000C1D': 'Mettler & Fuchs AG', '000C39': 'Sentinel Wireless Inc.', '000C32': 'Avionic Design Development GmbH', '000C33': 'Compucase Enterprise Co. Ltd.', '000C36': 'SHARP TAKAYA ELECTRONICS INDUSTRY CO.,LTD.', '000C06': 'Nixvue Systems Pte Ltd', '000C08': 'HUMEX Technologies Corp.', '000C0D': 'Communications & Power Industries / Satcom Division', '000C04': 'Tecnova', '000C65': 'Sunin Telecom', '000C6C': 'Eve Systems GmbH', '000C6F': 'Amtek system co.,LTD.', '000C2D': 'FullWave Technology Co., Ltd.', '000C26': 'Weintek Labs. Inc.', '000C2B': 'ELIAS Technology, Inc.', '000C1E': 'Global Cache', '000C01': 'Abatron AG', '000BF6': 'Nitgen Co., Ltd', '000C5B': 'HANWANG TECHNOLOGY CO.,LTD', '000C60': 'ACM Systems', '000C13': 'MediaQ', '000C58': 'M&S Systems', '000C51': 'Scientific Technologies Inc.', '000BF0': 'MoTEX Products Co., Ltd.', '000BF1': 'LAP Laser Applikations', '000BEE': 'inc.jet, Incorporated', '000BE2': 'Lumenera Corporation', '000BA8': 'HANBACK ELECTRONICS CO., LTD.', '000BA9': 'CloudShield Technologies, Inc.', '000BA1': 'Fujikura Solutions Ltd.', '000BA6': 'Miyakawa Electric Works Ltd.', '000BFD': 'Cisco Systems, Inc', '000BFA': 'EXEMYS SRL', '000BF4': 'Private', '000BFB': 'D-NET International Corporation', '000BCB': 'Fagor Automation , S. 
Coop', '000BC8': 'AirFlow Networks', '000BCE': 'Free2move AB', '000BCF': 'AGFA NDT INC.', '000BE1': 'Nokia NET Product Operations', '000BE0': 'SercoNet Ltd.', '000B93': 'Ritter Elektronik', '000B9B': 'Sirius System Co, Ltd.', '000BC3': 'Multiplex, Inc.', '000BBE': 'Cisco Systems, Inc', '000BBD': 'Connexionz Limited', '000B44': 'Concord IDea Corp.', '000B42': 'commax Co., Ltd.', '000B47': 'Advanced Energy', '000B4E': 'VertexRSI, General Dynamics SatCOM Technologies, Inc.', '000B4D': 'Emuzed', '000B40': 'Cambridge Industries Group (CIG)', '000B8E': 'Ascent Corporation', '000B8F': 'AKITA ELECTRONICS SYSTEMS CO.,LTD.', '000B8D': 'Avvio Networks', '000B85': 'Cisco Systems, Inc', '000B7F': 'Align Engineering LLC', '000B81': 'Kaparel Corporation', '000B82': 'Grandstream Networks, Inc.', '000B6E': 'Neff Instrument Corp.', '000B72': 'Lawo AG', '000B78': 'TAIFATECH INC.', '000B6C': 'Sychip Inc.', '0091D6': 'Crystal Group, Inc.', '000B5A': 'HyperEdge', '000B3D': 'CONTAL OK Ltd.', '000B1D': 'LayerZero Power Systems, Inc.', '000B19': 'Vernier Networks, Inc.', '000B16': 'Communication Machinery Corporation', '000B12': 'NURI Telecom Co., Ltd.', '000B0F': 'Bosch Rexroth', '000B0C': 'Agile Systems Inc.', '000B0A': 'dBm Optics', '000B09': 'Ifoundry Systems Singapore', '000AFE': 'NovaPal Ltd', '000AFD': 'Kentec Electronics', '000B31': 'Yantai ZhiYang Scientific and technology industry CO., LTD', '000B2F': 'bplan GmbH', '000AD5': 'Brainchild Electronic Co., Ltd.', '000AD6': 'BeamReach Networks', '000AEF': 'OTRUM ASA', '000B24': 'AirLogic', '000AE5': 'ScottCare Corporation', '000ACB': 'XPAK MSA Group', '000AC5': 'Color Kinetics', '000ABD': 'Rupprecht & Patashnick Co.', '000AB0': 'LOYTEC electronics GmbH', '000AB5': 'Digital Electronic Network', '000A9B': 'TB Group Inc', '000A73': 'Scientific Atlanta', '000A69': 'SUNNY bell Technology Co., Ltd.', '000A6C': 'Walchem Corporation', '000A98': 'M+F Gwinner GmbH & Co', '000A84': 'Rainsun Enterprise Co., Ltd.', '000A7E': 'The Advantage Group', '000A78': 'OLITEC', '000AD2': 'JEPICO Corporation', '000AA9': 'Brooks Automation GmbH', '000AA5': 'MAXLINK INDUSTRIES LIMITED', '000AA2': 'SYSTEK INC.', '000A91': 'HemoCue AB', '000A3A': 'J-THREE INTERNATIONAL Holding Co., Ltd.', '000A47': 'Allied Vision Technologies', '000A3C': 'Enerpoint Ltd.', '000A44': 'Avery Dennison Deutschland GmbH', '000A40': 'Crown Audio -- Harmanm International', '000A5F': 'almedio inc.', '000A6B': 'Tadiran Telecom Business Systems LTD', '000A61': 'Cellinx Systems Inc.', '000A5B': 'Power-One as', '000A1D': 'Optical Communications Products Inc.', '000A16': 'Lassen Research', '000A18': 'Vichel Inc.', '000A28': 'Motorola', '000A29': 'Pan Dacom Networking AG', '000A26': 'CEIA S.p.A.', '000A0D': 'FCI Deutschland GmbH', '0009F8': 'UNIMO TECHNOLOGY CO., LTD.', '0009FF': 'X.net 2000 GmbH', '000A55': 'MARKEM Corporation', '000A4C': 'Molecular Devices Corporation', '000A4D': 'Noritz Corporation', '000A06': 'Teledex LLC', '0009E8': 'Cisco Systems, Inc', '0009ED': 'CipherOptics', '0009F2': 'Cohu, Inc., Electronics Division', '0009E9': 'Cisco Systems, Inc', '0009DC': 'Galaxis Technology AG', '0009DD': 'Mavin Technology Inc.', '0009AF': 'e-generis', '0009AD': 'HYUNDAI SYSCOMM, INC.', '0009A9': 'Ikanos Communications', '000A03': 'ENDESA SERVICIOS, S.L.', '0009FE': 'Daisy Technologies, Inc.', '0009EB': 'HuMANDATA LTD.', '0009C2': 'Onity, Inc.', '0009C3': 'NETAS', '0009D1': 'SERANOA NETWORKS INC', '0009CE': 'SpaceBridge Semiconductor Corp.', '0009BD': 'Epygi Technologies, Ltd.', '0009B5': '3J Tech. 
Co., Ltd.', '0009B8': 'Entise Systems', '0009C6': 'Visionics Corporation', '00099F': 'VIDEX INC.', '0009A2': 'Interface Co., Ltd.', '0009A1': 'Telewise Communications, Inc.', '00096B': 'IBM Corp', '00096D': 'Powernet Technologies Corp.', '000964': 'Hi-Techniques, Inc.', '000965': 'HyunJu Computer Co., Ltd.', '00096F': 'Beijing Zhongqing Elegant Tech. Corp.,Limited', '000983': 'GlobalTop Technology, Inc.', '00097D': 'SecWell Networks Oy', '00095E': 'Masstech Group Inc.', '000959': 'Sitecsoft', '000957': 'Supercaller, Inc.', '000976': 'Datasoft ISDN Systems GmbH', '00097A': 'Louis Design Labs.', '00094F': 'elmegt GmbH & Co. KG', '000943': 'Cisco Systems, Inc', '000982': 'Loewe Opta GmbH', '00091C': 'CacheVision, Inc', '00091A': 'Macat Optics & Electronics Co., Ltd.', '00091B': 'Digital Generation Inc.', '000932': 'Omnilux', '000929': 'Sanyo Industries (UK) Limited', '0008DF': 'Alistel Inc.', '0008DB': 'Corrigent Systems', '0008D8': 'Dowkey Microwave', '0008EA': 'Motion Control Engineering, Inc', '0008ED': 'ST&T Instrument Corp.', '000939': 'ShibaSoku Co.,Ltd.', '000933': 'Ophit Co.Ltd.', '0008FA': 'KEB Automation KG', '0008F2': 'C&S Technology', '000928': 'Telecore', '00092E': 'B&Tech System Inc.', '0008D2': 'ZOOM Networks Inc.', '000902': 'Redline Communications Inc.', '000879': 'CEM Corporation', '00087D': 'Cisco Systems, Inc', '000875': 'Acorp Electronics Corp.', '00086F': 'Resources Computer Network Ltd.', '0008C5': 'Liontech Co., Ltd.', '0008CA': 'TwinHan Technology Co.,Ltd', '0008AC': 'Eltromat GmbH', '0008AA': 'KARAM', '0008AB': 'EnerLinx.com, Inc.', '0008AD': 'Toyo-Linx Co., Ltd.', '0008B6': 'RouteFree, Inc.', '0008B0': 'BKtel communications GmbH', '000885': 'EMS Dr. Thomas Wünsche', '000872': 'Sorenson Communications', '00087C': 'Cisco Systems, Inc', '000893': 'LE INFORMATION COMMUNICATION INC.', '000888': 'OULLIM Information Technology Inc,.', '00089A': 'Alcatel Microelectronics', '0008A1': 'CNet Technology Inc.', '000867': 'Uptime Devices', '00085E': 'PCO AG', '000856': 'Gamatronic Electronic Industries Ltd.', '0007E2': 'Bitworks, Inc.', '0007E6': 'edgeflow Canada Inc.', '0007E5': 'Coup Corporation', '0007DE': 'eCopilt AB', '0007DF': 'Vbrick Systems Inc.', '0005F9': 'TOA Corporation', '0007CA': 'Creatix Polymedia Ges Fur Kommunikaitonssysteme', '0007C8': 'Brain21, Inc.', '0007C5': 'Gcom, Inc.', '00081C': '@pos.com', '00081E': 'Repeatit AB', '000816': 'Bluelon ApS', '0007C1': 'Overture Networks, Inc.', '0007C2': 'Netsys Telecom', '0007F8': 'ITDevices, Inc.', '0007F3': 'Thinkengine Networks', '0007EE': 'telco Informationssysteme GmbH', '000811': 'VOIX Corporation', '000806': 'Raonet Systems, Inc.', '000826': 'Colorado Med Tech', '000853': 'Schleicher GmbH & Co. Relaiswerke KG', '0007AE': 'Britestream Networks, Inc.', '0007B1': 'Equator Technologies', '0007A7': 'A-Z Inc.', '0007A6': 'Leviton Manufacturing Co., Inc.', '000774': 'GuangZhou Thinker Technology Co. Ltd.', '000798': 'Selea SRL', '000791': 'International Data Communications, Inc.', '00078F': 'Emkay Innovative Products', '000782': 'Oracle Corporation ', '000786': 'Wireless Networks Inc.', '00075E': 'Ametek Power Instruments', '000760': 'TOMIS Information & Telecom Corp.', '0007A3': 'Ositis Software, Inc.', '00079A': 'Verint Systems Inc', '00074E': 'IPFRONT Inc', '000778': 'GERSTEL GmbH & Co. 
KG', '00076D': 'Flexlight Networks', '000779': 'Sungil Telecom Co., Ltd.', '00077E': 'Elrest GmbH', '000752': 'Rhythm Watch Co., Ltd.', '00074F': 'Cisco Systems, Inc', '000743': 'Chelsio Communications', '000747': 'Mecalc', '000744': 'Unico, Inc.', '000749': 'CENiX Inc.', '0006FA': 'IP SQUARE Co, Ltd.', '000703': 'CSEE Transport', '000706': 'Sanritz Corporation', '0006EF': 'Maxxan Systems, Inc.', '00073D': 'Nanjing Postel Telecommunications Co., Ltd.', '000739': 'Scotty Group Austria Gmbh', '000735': 'Flarion Technologies, Inc.', '000713': 'IP One, Inc.', '000707': 'Interalia Inc.', '0006F0': 'Digeo, Inc.', '000700': 'Zettamedia Korea', '0006F2': 'Platys Communications', '000722': 'The Nielsen Company', '00071F': 'European Systems Integration', '000721': 'Formac Elektronik GmbH', '0006E9': 'Intime Corp.', '0006EA': 'ELZET80 Mikrocomputer GmbH&Co. KG', '000708': 'Bitrage Inc.', '000712': 'JAL Information Technology', '000730': 'Hutchison OPTEL Telecom Technology Co., Ltd.', '0006E5': 'Fujian Newland Computer Ltd. Co.', '0006DE': 'Flash Technology', '0006DF': 'AIDONIC Corporation', '0006DD': 'AT & T Laboratories - Cambridge Ltd', '0006C5': 'INNOVI Technologies Limited', '0006C6': 'lesswire AG', '0006B7': 'TELEM GmbH', '0006BC': 'Macrolink, Inc.', '0006C2': 'Smartmatic Corporation', '000654': 'Winpresa Building Automation Technologies GmbH', '0006B4': 'Vorne Industries, Inc.', '0006AE': 'Himachal Futuristic Communications Ltd', '0006B0': 'Comtech EF Data Corp.', '00068A': 'NeuronNet Co. Ltd. R&D Center', '000685': 'NetNearU Corporation', '0006CA': 'American Computer & Digital Components, Inc. (ACDC)', '0006CE': 'DATENO', '00067F': 'Digeo, Inc.', '000683': 'Bravara Communications, Inc.', '000655': 'Yipee, Inc.', '0006D1': 'Tahoe Networks, Inc.', '0006D4': 'Interactive Objects, Inc.', '000696': 'Advent Networks', '00068E': 'HID Corporation', '00069B': 'AVT Audio Video Technologies GmbH', '000693': 'Flexus Computer Technology, Inc.', '00067A': 'JMP Systems', '000673': 'TKH Security Solutions USA', '000676': 'Novra Technologies Inc.', '000664': 'Fostex Corporation', '000644': 'NextGen Business Solutions, Inc', '000645': 'Meisei Electric Co. Ltd.', '00063A': 'Dura Micro, Inc.', '000628': 'Cisco Systems, Inc', '00061D': 'MIP Telecom, Inc.', '000632': 'Mesco Engineering GmbH', '000634': 'GTE Airfone Inc.', '00065E': 'Photuris, Inc.', '000640': 'White Rock Networks', '00060C': 'Melco Industries, Inc.', '00060E': 'IGYS Systems, Inc.', '000614': 'Prism Holdings', '000619': 'Connection Technology Systems', '00065D': 'Heidelberg Web Systems', '000650': 'Tiburon Networks, Inc.', '000608': 'At-Sky SAS', '0005EB': 'Blue Ridge Networks, Inc.', '0005D7': 'Vista Imaging, Inc.', '0005DB': 'PSI Nentec GmbH', '0005B9': 'Airvana, Inc.', '0005BC': 'Resource Data Management Ltd', '0005BE': 'Kongsberg Seatex AS', '0005BD': 'ROAX BV', '0005C1': 'A-Kyung Motion, Inc.', '0005B6': 'INSYS Microelectronics GmbH', '0005E8': 'TurboWave, Inc.', '0005F6': 'Young Chang Co. 
Ltd.', '0005FC': 'Schenck Pegasus Corp.', 'A06A00': 'Verilink Corporation', '0005F8': 'Real Time Access, Inc.', '0005B7': 'Arbor Technology Corp.', '00059B': 'Cisco Systems, Inc', '00059F': 'Yotta Networks, Inc.', '000587': 'Locus, Incorporated', '000590': 'Swissvoice Ltd.', '000595': 'Alesis Corporation', '0005DD': 'Cisco Systems, Inc', '0005D9': 'Techno Valley, Inc.', '0005DC': 'Cisco Systems, Inc', '0005B4': 'Aceex Corporation', '000598': 'CRONOS S.r.l.', '0005C3': 'Pacific Instruments, Inc.', '0005D0': 'Solinet Systems', '00054A': 'Ario Data Networks, Inc.', '000548': 'Disco Corporation', '00053E': 'KID Systeme GmbH', '00053F': 'VisionTek, Inc.', '00057E': 'Eckelmann Steuerungstechnik GmbH', '000580': 'FibroLAN Ltd.', '000582': 'ClearCube Technology', '000578': 'Private', '000527': 'SJ Tek Co. Ltd', '000529': 'Shanghai Broadan Communication Technology Co., Ltd', '00052C': 'Supreme Magic Corporation', '000572': 'Deonet Co., Ltd.', '000576': 'NSM Technology Ltd.', '00056A': 'Heuft Systemtechnik GmbH', '000555': 'Japan Cash Machine Co., Ltd.', '000552': 'Xycotec Computer GmbH', '000534': 'Northstar Engineering Ltd.', '000535': 'Chip PC Ltd.', '000568': 'Piltofish Networks AB', '000562': 'Digital View Limited', '00053D': 'Agere Systems', '000519': 'Siemens Building Technologies AG,', '0004DA': 'Relax Technology, Inc.', '0004E5': 'Glonet Systems, Inc.', '0004D2': 'Adcon Telemetry GmbH', '0004D3': 'Toyokeiki Co., Ltd.', '0004D5': 'Hitachi Information & Communication Engineering, Ltd.', '0004AB': 'Mavenir Inc.', '0004A7': 'FabiaTech Corporation', '0004AA': 'Jetstream Communications', '000500': 'Cisco Systems, Inc', '000507': 'Fine Appliance Corp.', '0004FD': 'Japan Control Engineering Co., Ltd.', '0004F7': 'Omega Band, Inc.', '0004EE': 'Lincoln Electric Company', '0004F0': 'International Computers, Ltd', '0004CA': 'FreeMs Corp.', '0004C5': 'ASE Technologies, USA', '00050D': 'Midstream Technologies, Inc.', '0004B0': 'ELESIGN Co., Ltd.', '000455': 'ANTARA.net', '000457': 'Universal Access Technology, Inc.', '0004A0': 'Verity Instruments, Inc.', '00049E': 'Wirelink Co., Ltd.', '00049A': 'Cisco Systems, Inc', '000498': 'Mahi Networks', '000497': 'MacroSystem Digital Video AG', '00044D': 'Cisco Systems, Inc', '000454': 'Quadriga UK', '000448': 'Polaroid Corporation', '000447': 'Acrowave Systems Co., Ltd.', '000485': 'PicoLight', '000479': 'Radius Co., Ltd.', '00043E': 'Telencomm', '000436': 'ELANsat Technologies, Inc.', '000432': 'Voyetra Turtle Beach, Inc.', '000435': 'InfiNet LLC', '000437': 'Powin Information Technology, Inc.', '00046D': 'Cisco Systems, Inc', '000488': 'Eurotherm Controls', '000466': 'ARMITEL Co.', '0003D3': 'Internet Energy Systems, Inc.', '0003CD': 'Clovertech, Inc.', '0003CC': 'Momentum Computer, Inc.', '000414': 'Umezawa Musen Denki Co., Ltd.', '00040C': 'Kanno Works, Ltd.', '000408': 'Sanko Electronics Co., Ltd.', '000407': 'Topcon Positioning Systems, Inc.', '0003EE': 'MKNet Corporation', '0003EA': 'Mega System Technologies, Inc.', '0003E6': 'Entone, Inc.', '000409': 'Cratos Networks', '0003E2': 'Comspace Corporation', '0003FF': 'Microsoft Corporation', '0003D7': 'NextNet Wireless, Inc.', '0003A9': 'AXCENT Media AG', '00039F': 'Cisco Systems, Inc', '0003B5': 'Entra Technology Co.', '000428': 'Cisco Systems, Inc', '00037D': 'Stellcom', '000383': 'Metera Networks, Inc.', '000379': 'Proscend Communications, Inc.', '000366': 'ASM Pacific Technology', '000362': 'Vodtel Communications, Inc.', '000364': 'Scenix Semiconductor, Inc.', '00035E': 'Metropolitan Area Networks, Inc.', 
'000341': 'Axon Digital Design', '008037': 'Ericsson Group', '00033D': 'ILSHin Lab', '000334': 'Newport Electronics', '000390': 'Digital Video Communications, Inc.', '00038B': 'PLUS-ONE I&T, Inc.', '00033A': 'Silicon Wave, Inc.', '000332': 'Cisco Systems, Inc', '00038C': 'Total Impact', '000386': 'Ho Net, Inc.', '00036F': 'Telsey SPA', '000372': 'ULAN', '00035C': 'Saint Song Corp.', '00035D': 'Bosung Hi-Net Co., Ltd.', '00034D': 'Chiaro Networks, Ltd.', '0002B5': 'Avnet, Inc.', '0002B4': 'DAPHNE', '0002AC': '3PAR data', '0002B0': 'Hokubu Communication & Industrial Co., Ltd.', '0002AA': 'PLcom Co., Ltd.', '0002D9': 'Reliable Controls', '0002D2': 'Workstation AG', '0002CD': 'TeleDream, Inc.', '0002D0': 'Comdial Corporation', '0002CC': 'M.C.C.I', '0002C5': 'Evertz Microsystems Ltd.', '00030E': 'Core Communications Co., Ltd.', '000312': 'TRsystems GmbH', '0002F2': 'eDevice, Inc.', '0002EF': 'CCC Network Systems Group Ltd.', '00029D': 'Merix Corp.', '000326': 'Iwasaki Information Systems Co., Ltd.', '000322': 'IDIS Co., Ltd.', '00031D': 'Taiwan Commate Computer, Inc.', '0002E8': 'E.D.&A.', '000253': 'Televideo, Inc.', '00024A': 'Cisco Systems, Inc', '000249': 'Aviv Infocom Co, Ltd.', '00023A': 'ZSK Stickmaschinen GmbH', '000232': 'Avision, Inc.', '000235': 'Paragon Networks International', '000237': 'Cosmo Research Corp.', '00027F': 'ask-technologies.com', '00027E': 'Cisco Systems, Inc', '001095': 'Thomson Inc.', '00027B': 'Amplify Net, Inc.', '000245': 'Lampus Co, Ltd.', '000246': 'All-Win Tech Co., Ltd.', '000273': 'Coriolis Networks', '00026F': 'Senao International Co., Ltd.', '000286': 'Occam Networks', '00025D': 'Calix Networks', '000257': 'Microcom Corp.', '000290': 'Woorigisool, Inc.', '000292': 'Logic Innovations, Inc.', '00021C': 'Network Elements, Inc.', '00020C': 'Metro-Optix', '000216': 'Cisco Systems, Inc', '000214': 'DTVRO', '0001F6': 'Association of Musical Electronics Industry', '0001ED': 'SETA Corp.', '0001CB': 'EVR', '0001C3': 'Acromag, Inc.', '0001C2': 'ARK Research Corp.', '0001E0': 'Fast Systems, Inc.', '0001D6': 'manroland AG', '0001EA': 'Cirilium Corp.', '000228': 'Necsom, Ltd.', '000230': 'Intersoft Electronics', '00020F': 'AATR', '0030AC': 'Systeme Lauer GmbH & Co., Ltd.', '0001AC': 'Sitara Networks, Inc.', '0001AD': 'Coach Master International d.b.a. 
CMI Worldwide, Inc.', '000188': 'LXCO Technologies ag', '000158': 'Electro Industries/Gauge Tech', '00019B': 'Kyoto Microcomputer Co., Ltd.', '00017F': 'Experience Music Project', '000187': 'I2SE GmbH', '00012A': 'Telematica Sistems Inteligente', '00012D': 'Komodo Technology', '000148': 'X-traWeb Inc.', '000171': 'Allied Data Technologies', '000166': 'TC GROUP A/S', '000176': 'Orient Silver Enterprises', '000180': 'AOpen, Inc.', '000197': 'Cisco Systems, Inc', '00019A': 'LEUNIG GmbH', '00015A': 'Digital Video Broadcasting', '000159': 'S1 Corporation', '00013A': 'SHELCAD COMMUNICATIONS, LTD.', '000140': 'Sendtek Corporation', '0030C4': 'Canon Imaging Systems Inc.', '00304D': 'ESI', '00302E': 'Hoft & Wessel AG', '0030ED': 'Expert Magnetics Corp.', '00300F': 'IMT - Information Management T', '000123': 'Schneider Electric Japan Holdings Ltd.', '000125': 'YAESU MUSEN CO., LTD.', '000126': 'PAC Labs', '00011B': 'Unizone Technologies, Inc.', '00011D': 'Centillium Communications', '00011F': 'RC Networks, Inc.', '00B080': 'Mannesmann Ipulsys B.V.', '00B01E': 'Rantic Labs, Inc.', '003082': 'TAIHAN ELECTRIC WIRE CO., LTD.', '0030FB': 'AZS Technology AG', '0030AE': 'Times N System, Inc.', '003003': 'Phasys Ltd.', '00B0F0': 'CALY NETWORKS', '00B09A': 'Morrow Technologies Corp.', '0030A9': 'Netiverse, Inc.', '0030FE': 'DSA GmbH', '0030E2': 'GARNET SYSTEMS CO., LTD.', '0030D5': 'DResearch GmbH', '081443': 'UNIBRAIN S.A.', '00B009': 'Grass Valley, A Belden Brand', '00B0AC': 'SIAE-Microelettronica S.p.A.', '0030AF': 'Honeywell GmbH', '003023': 'COGENT COMPUTER SYSTEMS, INC.', '003090': 'CYRA TECHNOLOGIES, INC.', '0030A7': 'SCHWEITZER ENGINEERING', '00309A': 'ASTRO TERRA CORP.', '00309F': 'AMBER NETWORKS', '0030A8': "OL'E COMMUNICATIONS, INC.", '0030D1': 'INOVA CORPORATION', '003018': 'Jetway Information Co., Ltd.', '003089': 'Spectrapoint Wireless, LLC', '00305D': 'DIGITRA SYSTEMS, INC.', '003055': 'Renesas Technology America, Inc.', '00302F': 'GE Aviation System', '003087': 'VEGA GRIESHABER KG', '0030AA': 'AXUS MICROSYSTEMS, INC.', '00300E': 'Klotz Digital AG', '0030BB': 'CacheFlow, Inc.', '003007': 'OPTI, INC.', '0030BD': 'BELKIN COMPONENTS', '00307C': 'ADID SA', '0030E6': 'Draeger Medical Systems, Inc.', '003062': 'IP Video Networks Inc', '00302D': 'QUANTUM BRIDGE COMMUNICATIONS', '0030CB': 'OMNI FLOW COMPUTERS, INC.', '00D0E9': 'Advantage Century Telecommunication Corp.', '00D094': 'Seeion Control LLC', '00D015': 'UNIVEX MICROTECHNOLOGY CORP.', '00D048': 'ECTON, INC.', '00D0A5': 'AMERICAN ARIUM', '00D0CF': 'MORETON BAY', '00D07F': 'STRATEGY & TECHNOLOGY, LIMITED', '003036': 'RMP ELEKTRONIKSYSTEME GMBH', '00306B': 'CMOS SYSTEMS, INC.', '0030AD': 'SHANGHAI COMMUNICATION', '0030CF': 'TWO TECHNOLOGIES, INC.', '0030B2': 'L-3 Sonoma EO', '003035': 'Corning Incorporated', '00307F': 'IRLAN LTD.', '00D085': 'OTIS ELEVATOR COMPANY', '00D0C2': 'BALTHAZAR TECHNOLOGY AB', '00D022': 'INCREDIBLE TECHNOLOGIES, INC.', '00D071': 'ECHELON CORP.', '00D04F': 'BITRONICS, INC.', '00D0FB': 'TEK MICROSYSTEMS, INCORPORATED', '00D081': 'RTD Embedded Technologies, Inc.', '00D002': 'DITECH CORPORATION', '00D069': 'TECHNOLOGIC SYSTEMS', '00D090': 'Cisco Systems, Inc', '00D066': 'WINTRISS ENGINEERING CORP.', '00D082': 'IOWAVE INC.', '00D09C': 'KAPADIA COMMUNICATIONS', '00D0F3': 'SOLARI DI UDINE SPA', '00D0F5': 'ORANGE MICRO, INC.', '00D078': 'Eltex of Sweden AB', '00D09B': 'SPECTEL LTD.', '00D011': 'PRISM VIDEO, INC.', '00D062': 'DIGIGRAM', '00D08D': 'PHOENIX GROUP, INC.', '00D0DF': 'KUZUMI ELECTRONICS, INC.', '00D0E1': 'AVIONITEK 
ISRAEL INC.', '00D008': 'MACTELL CORPORATION', '00D0D9': 'DEDICATED MICROCOMPUTERS', '00D00B': 'RHK TECHNOLOGY, INC.', '00D0A0': 'MIPS DENMARK', '00D00A': 'LANACCESS TELECOM S.A.', '00D01C': 'SBS TECHNOLOGIES,', '00D0D5': 'GRUNDIG AG', '00D039': 'UTILICOM, INC.', '00D067': 'CAMPIO COMMUNICATIONS', '00503E': 'Cisco Systems, Inc', '005020': 'MEDIASTAR CO., LTD.', '00D075': 'ALARIS MEDICAL SYSTEMS, INC.', '0050E7': 'PARADISE INNOVATIONS (ASIA)', '005021': 'EIS INTERNATIONAL, INC.', '00505E': 'DIGITEK MICROLOGIC S.A.', '005090': 'DCTRI', '00503B': 'MEDIAFIRE CORPORATION', '00D058': 'Cisco Systems, Inc', '00D032': 'YANO ELECTRIC CO., LTD.', '00D0F1': 'SEGA ENTERPRISES, LTD.', '00D03D': 'GALILEO TECHNOLOGY, LTD.', '00D06D': 'ACRISON, INC.', '00D02B': 'JETCELL, INC.', '00D03A': 'ZONEWORX, INC.', '00D001': 'VST TECHNOLOGIES, INC.', '00D041': 'AMIGO TECHNOLOGY CO., LTD.', '00D09D': 'VERIS INDUSTRIES', '0050FB': 'VSK ELECTRONICS', '0050F4': 'SIGMATEK GMBH & CO. KG', '00D04C': 'EUROTEL TELECOM LTD.', '005046': 'MENICX INTERNATIONAL CO., LTD.', '005041': 'Coretronic Corporation', '0050B0': 'TECHNOLOGY ATLANTA CORPORATION', '0050DD': 'SERRA SOLDADURA, S.A.', '005067': 'AEROCOMM, INC.', '0050B6': 'GOOD WAY IND. CO., LTD.', '00504B': 'BARCONET N.V.', '005063': 'OY COMSEL SYSTEM AB', '00508D': 'ABIT COMPUTER CORPORATION', '0050A0': 'DELTA COMPUTER SYSTEMS, INC.', '005086': 'TELKOM SA, LTD.', '005000': 'NEXO COMMUNICATIONS, INC.', '0050C8': 'Addonics Technologies, Inc.', '0050C4': 'IMD', '00501A': 'IQinVision', '00508F': "ASITA TECHNOLOGIES INT'L LTD.", '005015': 'BRIGHT STAR ENGINEERING', '005057': 'BROADBAND ACCESS SYSTEMS', '005089': 'SAFETY MANAGEMENT SYSTEMS', '005066': 'AtecoM GmbH advanced telecomunication modules', '005059': 'iBAHN', '0050C6': 'LOOP TELECOMMUNICATION INTERNATIONAL, INC.', '00509F': 'HORIZON COMPUTER', '0050A5': 'CAPITOL BUSINESS SYSTEMS, LTD.', '0050EA': 'XEL COMMUNICATIONS, INC.', '0050E8': 'NOMADIX INC.', '0050D9': 'ENGETRON-ENGENHARIA ELETRONICA IND. e COM. LTDA', '005001': 'YAMASHITA SYSTEMS CORP.', '0050AE': 'FDK Co., Ltd', '00907B': 'E-TECH, INC.', '009081': 'ALOHA NETWORKS, INC.', '00901C': 'mps Software Gmbh', '0090DB': 'NEXT LEVEL COMMUNICATIONS', '009056': 'TELESTREAM, INC.', '009034': 'IMAGIC, INC.', '009073': 'GAIO TECHNOLOGY', '0090BB': 'TAINET COMMUNICATION SYSTEM Corp.', '009090': 'I-BUS', '00901A': 'UNISPHERE SOLUTIONS', '00905E': 'RAULAND-BORG CORPORATION', '0090AF': 'J. MORITA MFG. CORP.', '005088': 'AMANO CORPORATION', '005031': 'AEROFLEX LABORATORIES, INC.', '005003': 'Xrite Inc', '0050D3': 'DIGITAL AUDIO PROCESSING PTY. LTD.', '0050AD': 'CommUnique Wireless Corp.', '0050AF': 'INTERGON, INC.', '009068': 'DVT CORP.', '0090B5': 'NIKON CORPORATION', '009005': 'PROTECH SYSTEMS CO., LTD.', '0090F8': 'MEDIATRIX TELECOM', '009010': 'SIMULATION LABORATORIES, INC.', '0090C6': 'OPTIM SYSTEMS, INC.', '0090CA': 'ACCORD VIDEO TELECOMMUNICATIONS, LTD.', '0090E9': 'JANZ COMPUTER AG', '009037': 'ACUCOMM, INC.', '009078': 'MER TELEMANAGEMENT SOLUTIONS, LTD.', '009074': 'ARGON NETWORKS, INC.', '00909F': 'DIGI-DATA CORPORATION', '00903B': 'TriEMS Research Lab, Inc.', '009019': 'HERMES ELECTRONICS CO., LTD.', '0090BC': 'TELEMANN CO., LTD.', '00900A': 'PROTON ELECTRONIC INDUSTRIAL CO., LTD.', '0090D5': 'EUPHONIX, INC.', '00904A': 'CONCUR SYSTEM TECHNOLOGIES', '00909E': 'Critical IO, LLC', '0090AA': 'INDIGO ACTIVE VISION SYSTEMS LIMITED', '00905B': 'RAYMOND AND LAE ENGINEERING', '0090EB': 'SENTRY TELECOM SYSTEMS', '0090FE': 'ELECOM CO., LTD. 
(LANEED DIV.)', '009059': 'TELECOM DEVICE K.K.', '0090E5': 'TEKNEMA, INC.', '0090F4': 'LIGHTNING INSTRUMENTATION', '0090B8': 'ROHDE & SCHWARZ GMBH & CO. KG', '0090D8': 'WHITECROSS SYSTEMS', '00902E': 'NAMCO LIMITED', '00908F': 'AUDIO CODES LTD.', '001028': 'COMPUTER TECHNICA, INC.', '0010B7': 'COYOTE TECHNOLOGIES, LLC', '00102C': 'Lasat Networks A/S', '0010FD': 'COCOM A/S', '0010C9': 'MITSUBISHI ELECTRONICS LOGISTIC SUPPORT CO.', '000400': 'LEXMARK INTERNATIONAL, INC.', '001092': 'NETCORE INC.', '00101C': 'OHM TECHNOLOGIES INTL, LLC', '001046': 'ALCORN MCBRIDE INC.', '0010C5': 'PROTOCOL TECHNOLOGIES, INC.', '00101A': 'PictureTel Corp.', '00903F': 'AZTEC RADIOMEDIA', '001043': 'A2 CORPORATION', '00104E': 'CEOLOGIC', '0010A5': 'OXFORD INSTRUMENTS', '0010D7': 'ARGOSY RESEARCH INC.', '00109E': 'AWARE, INC.', '001005': 'UEC COMMERCIAL', '0010B8': 'ISHIGAKI COMPUTER SYSTEM CO.', '00108B': 'LASERANIMATION SOLLINGER GMBH', '001047': 'ECHO ELETRIC CO. LTD.', '001070': 'CARADON TREND LTD.', '0010BA': 'MARTINHO-DAVIS SYSTEMS, INC.', '0010C2': 'WILLNET, INC.', '001040': 'INTERMEC CORPORATION', '00102E': 'NETWORK SYSTEMS & TECHNOLOGIES PVT. LTD.', '0010B0': 'MERIDIAN TECHNOLOGY CORP.', '001067': 'Ericsson', '001021': 'ENCANTO NETWORKS, INC.', '001064': 'DNPG, LLC', '0004AC': 'IBM Corp', '0010B4': 'ATMOSPHERE NETWORKS', '0010C7': 'DATA TRANSMISSION NETWORK', '001074': 'ATEN INTERNATIONAL CO., LTD.', '00E05E': 'JAPAN AVIATION ELECTRONICS INDUSTRY, LTD.', '00E09D': 'SARNOFF CORPORATION', '00E028': 'APTIX CORPORATION', '00E08C': 'NEOPARADIGM LABS, INC.', '00E07D': 'NETRONIX, INC.', '00E05D': 'UNITEC CO., LTD.', '00E0A1': 'HIMA PAUL HILDEBRANDT GmbH Co. KG', '00E088': 'LTX-Credence CORPORATION', '00E0DF': 'KEYMILE GmbH', '00E0F2': 'ARLOTTO COMNET, INC.', '00E058': 'PHASE ONE DENMARK A/S', '00E076': 'DEVELOPMENT CONCEPTS, INC.', '00E0CD': 'SAAB SENSIS CORPORATION', '00E0E1': 'G2 NETWORKS, INC.', '00E08D': 'PRESSURE SYSTEMS, INC.', '00E046': 'BENTLY NEVADA CORP.', '00E03D': 'FOCON ELECTRONIC SYSTEMS A/S', '00E019': 'ING. GIORDANO ELETTRONICA', '00E0F8': 'DICNA CONTROL AB', '006039': 'SanCom Technology, Inc.', '006049': 'VINA TECHNOLOGIES', '00608D': 'UNIPULSE CORP.', '006099': 'SBE, Inc.', '0060B3': 'Z-COM, INC.', '00E0D2': 'VERSANET COMMUNICATIONS, INC.', '00E047': 'InFocus Corporation', '00E0C3': 'SAKAI SYSTEM DEVELOPMENT CORP.', '00E092': 'ADMTEK INCORPORATED', '00E0FF': 'SECURITY DYNAMICS TECHNOLOGIES, Inc.', '00E0AB': 'DIMAT S.A.', '00E030': 'MELITA INTERNATIONAL CORP.', '00E0AA': 'ELECTROSONIC LTD.', '00E075': 'Verilink Corporation', '00E02E': 'SPC ELECTRONICS CORPORATION', '00E09A': 'Positron Inc.', '00E03E': 'ALFATECH, INC.', '006002': 'SCREEN SUBTITLING SYSTEMS, LTD', '006089': 'XATA', '006021': 'DSC CORPORATION', '0060B8': 'CORELIS Inc.', '00E010': 'HESS SB-AUTOMATENBAU GmbH', '00E033': 'E.E.P.D. GmbH', '00E0A2': 'MICROSLATE INC.', '00E079': 'A.T.N.R.', '00609C': 'Perkin-Elmer Incorporated', '0060CF': 'ALTEON NETWORKS, INC.', '00E07B': 'BAY NETWORKS', '00E01D': 'WebTV NETWORKS, INC.', '0060B9': 'NEC Platforms, Ltd', '0060CE': 'ACCLAIM COMMUNICATIONS', '006036': 'AIT Austrian Institute of Technology GmbH', '00608E': 'HE ELECTRONICS, TECHNOLOGIE & SYSTEMTECHNIK GmbH', '00601A': 'KEITHLEY INSTRUMENTS', '0060AD': 'MegaChips Corporation', '006055': 'CORNELL UNIVERSITY', '0060D0': 'SNMP RESEARCH INCORPORATED', '0060B7': 'CHANNELMATIC, INC.', '006006': 'SOTEC CO., LTD', '0060BA': 'SAHARA NETWORKS, INC.', '00606A': 'MITSUBISHI WIRELESS COMMUNICATIONS. 
INC.', '0060AF': 'PACIFIC MICRO DATA, INC.', '006038': 'Nortel Networks', '00606D': 'DIGITAL EQUIPMENT CORP.', '006098': 'HT COMMUNICATIONS', '006075': 'PENTEK, INC.', '006015': 'NET2NET CORPORATION', '0060DE': 'Kayser-Threde GmbH', '00604F': 'Tattile SRL ', '0060E8': 'HITACHI COMPUTER PRODUCTS (AMERICA), INC.', '0060F6': 'NEXTEST COMMUNICATIONS PRODUCTS, INC.', '006072': 'VXL INSTRUMENTS, LIMITED', '006051': 'QUALITY SEMICONDUCTOR', '00609D': 'PMI FOOD EQUIPMENT GROUP', '0060A2': 'NIHON UNISYS LIMITED CO.', '006084': 'DIGITAL VIDEO', '00602D': 'ALERTON TECHNOLOGIES, INC.', '0060F8': 'Loran International Technologies Inc.', '006092': 'MICRO/SYS, INC.', '00609E': 'ASC X3 - INFORMATION TECHNOLOGY STANDARDS SECRETARIATS', '006010': 'NETWORK MACHINES, INC.', '006044': 'LITTON/POLY-SCIENTIFIC', '006078': 'POWER MEASUREMENT LTD.', '006004': 'COMPUTADORES MODULARES SA', '0060E2': 'QUEST ENGINEERING & DEVELOPMENT', '0060B4': 'GLENAYRE R&D INC.', '00A01D': 'Red Lion Controls, LP', '00A0B9': 'EAGLE TECHNOLOGY, INC.', '00A019': 'NEBULA CONSULTANTS, INC.', '00A0ED': 'Brooks Automation, Inc.', '0060CA': 'HARMONIC SYSTEMS INCORPORATED', '006024': 'GRADIENT TECHNOLOGIES, INC.', '00A0A6': 'M.I. SYSTEMS, K.K.', '00A051': 'ANGIA COMMUNICATIONS. INC.', '00A013': 'TELTREND LTD.', '00A058': 'GLORY, LTD.', '00A0D0': 'TEN X TECHNOLOGY, INC.', '00A0BC': 'VIASAT, INCORPORATED', '00A05B': 'MARQUIP, INC.', '00A08C': 'MultiMedia LANs, Inc.', '00A038': 'EMAIL ELECTRONICS', '00A077': 'FUJITSU NEXION, INC.', '00A0A0': 'COMPACT DATA, LTD.', '00A042': 'SPUR PRODUCTS CORP.', '00A0C1': 'ORTIVUS MEDICAL AB', '00A04F': 'AMERITEC CORP.', '00A0CF': 'SOTAS, INC.', '00A072': 'OVATION SYSTEMS LTD.', '00A082': 'NKT ELEKTRONIK A/S', '00A0F0': 'TORONTO MICROELECTRONICS INC.', '00A0D7': 'KASTEN CHASE APPLIED RESEARCH', '00A065': 'Symantec Corporation', '00A0A3': 'RELIABLE POWER METERS', '00A01B': 'PREMISYS COMMUNICATIONS, INC.', '00A055': 'Data Device Corporation', '00A074': 'PERCEPTION TECHNOLOGY', '00A07F': 'GSM-SYNTEL, LTD.', '00A0AA': 'SPACELABS MEDICAL', '00A03B': 'TOSHIN ELECTRIC CO., LTD.', '00A0F3': 'STAUBLI', '00A029': 'COULTER CORPORATION', '00A087': 'Microsemi Corporation', '00A043': 'AMERICAN TECHNOLOGY LABS, INC.', '00A0F1': 'MTI', '00A0B3': 'ZYKRONIX', '00A0FF': 'TELLABS OPERATIONS, INC.', '00A0E5': 'NHC COMMUNICATIONS', '00A036': 'APPLIED NETWORK TECHNOLOGY', '00A0D2': 'ALLIED TELESIS INTERNATIONAL CORPORATION', '00A09B': 'QPSX COMMUNICATIONS, LTD.', '00A000': 'CENTILLION NETWORKS, INC.', '00A08A': 'BROOKTROUT TECHNOLOGY, INC.', '00A07B': 'DAWN COMPUTER INCORPORATION', '00A05C': 'INVENTORY CONVERSION, INC./', '00200F': 'EBRAINS Inc', '00A0D3': 'INSTEM COMPUTER SYSTEMS, LTD.', '00A0B4': 'TEXAS MICROSYSTEMS, INC.', '00A060': 'ACER PERIPHERALS, INC.', '00A083': 'ASIMMPHONY TURKEY', '0020F9': 'PARALINK NETWORKS, INC.', '002092': 'CHESS ENGINEERING B.V.', '00202B': 'ADVANCED TELECOMMUNICATIONS MODULES, LTD.', '0020DF': 'KYOSAN ELECTRIC MFG. CO., LTD.', '0020C7': 'AKAI Professional M.I. 
Corp.', '00A004': 'NETPOWER, INC.', '002087': 'MEMOTEC, INC.', '0020D1': 'MICROCOMPUTER SYSTEMS (M) SDN.', '0020CE': 'LOGICAL DESIGN GROUP, INC.', '002014': 'GLOBAL VIEW CO., LTD.', '0020C2': 'TEXAS MEMORY SYSTEMS, INC.', '002029': 'TELEPROCESSING PRODUCTS, INC.', '002069': 'ISDN SYSTEMS CORPORATION', '00208B': 'LAPIS TECHNOLOGIES, INC.', '002057': 'TITZE DATENTECHNIK GmbH', '002015': 'ACTIS COMPUTER SA', '002099': 'BON ELECTRIC CO., LTD.', '002006': 'GARRETT COMMUNICATIONS, INC.', '002024': 'PACIFIC COMMUNICATION SCIENCES', '00206B': 'KONICA MINOLTA HOLDINGS, INC.', '002004': 'YAMATAKE-HONEYWELL CO., LTD.', '002043': 'NEURON COMPANY LIMITED', '002071': 'IBR GMBH', '0020E5': 'APEX DATA, INC.', '0020BC': 'Long Reach Networks Pty Ltd', '00207C': 'AUTEC GMBH', '00C0C0': 'SHORE MICROSYSTEMS, INC.', '00C00C': 'RELIA TECHNOLGIES', '00C073': 'XEDIA CORPORATION', '00C0D4': 'AXON NETWORKS, INC.', '00C0CD': 'COMELTA, S.A.', '002085': 'Eaton Corporation', '0020CD': 'HYBRID NETWORKS, INC.', '00202E': 'DAYSTAR DIGITAL', '0020B3': 'Tattile SRL ', '002016': 'SHOWA ELECTRIC WIRE & CABLE CO', '0020EE': 'GTECH CORPORATION', '00204C': 'MITRON COMPUTER PTE LTD.', '002017': 'ORBOTECH', '002093': 'LANDINGS TECHNOLOGY CORP.', '002063': 'WIPRO INFOTECH LTD.', '00204D': 'INOVIS GMBH', '00205F': 'GAMMADATA COMPUTER GMBH', '00201F': 'BEST POWER TECHNOLOGY, INC.', '0020B6': 'AGILE NETWORKS, INC.', '00C0F3': 'NETWORK COMMUNICATIONS CORP.', '002056': 'NEOPRODUCTS', '002042': 'DATAMETRICS CORP.', '002078': 'RUNTOP, INC.', '0020ED': 'GIGA-BYTE TECHNOLOGY CO., LTD.', '00205D': 'NANOMATIC OY', '00C058': 'DATAEXPERT CORP.', '00C0D0': 'RATOC SYSTEM INC.', '00C0BF': 'TECHNOLOGY CONCEPTS, LTD.', '00C0BA': 'NETVANTAGE', '00C05E': 'VARI-LITE, INC.', '00C005': 'LIVINGSTON ENTERPRISES, INC.', '00C077': 'DAEWOO TELECOM LTD.', '00C0C8': 'MICRO BYTE PTY. LTD.', '00C069': 'Axxcelera Broadband Wireless', '00C067': 'UNITED BARCODE INDUSTRIES', '00C0A3': 'DUAL ENTERPRISES CORPORATION', '00C018': 'LANART CORPORATION', '009D8E': 'CARDIAC RECORDERS, INC.', '00BB01': 'OCTOTHORPE CORP.', '00C033': 'TELEBIT COMMUNICATIONS APS', '00C090': 'PRAIM S.R.L.', '00C0DE': 'ZCOMM, INC.', '00C013': 'NETRIX', '00C04C': 'DEPARTMENT OF FOREIGN AFFAIRS', '00C07C': 'HIGHTECH INFORMATION', '00C0B8': "FRASER'S HILL LTD.", '00C062': 'IMPULSE TECHNOLOGY', '00C0EC': 'DAUPHIN TECHNOLOGY', '00C086': 'THE LYNK CORPORATION', '00C0CA': 'ALFA, INC.', '00C06C': 'SVEC COMPUTER CORP.', '0040FF': 'TELEBIT CORPORATION', '00401F': 'COLORGRAPH LTD', '0040AF': 'DIGITAL PRODUCTS, INC.', '00C0DB': 'IPC CORPORATION (PTE) LTD.', '00C09B': 'RELIANCE COMM/TEC, R-TEC', '00C06A': 'ZAHNER-ELEKTRIK GMBH & CO. KG', '00C06B': 'OSI PLUS CORPORATION', '00C0D5': 'Werbeagentur Jürgen Siebert', '00C063': 'MORNING STAR TECHNOLOGIES, INC', '00C021': 'NETEXPRESS', '00C016': 'ELECTRONIC THEATRE CONTROLS', '00C0BC': 'TELECOM AUSTRALIA/CSSC', '00C00A': 'MICRO CRAFT', '00C0E3': 'OSITECH COMMUNICATIONS, INC.', '00C0FE': 'APTEC COMPUTER SYSTEMS, INC.', '00C0C1': 'QUAD/GRAPHICS, INC.', '00C089': 'TELINDUS DISTRIBUTION', '00C0B0': 'GCC TECHNOLOGIES,INC.', '00C074': 'TOYODA AUTOMATIC LOOM', '00403C': 'FORKS, INC.', '004042': 'N.A.T. 
GMBH', '0040F2': 'JANICH & KLASS COMPUTERTECHNIK', '0040A2': 'KINGSTAR TECHNOLOGY INC.', '0040DC': 'TRITEC ELECTRONIC GMBH', '00404E': 'FLUENT, INC.', '00408D': 'THE GOODYEAR TIRE & RUBBER CO.', '00C026': 'LANS TECHNOLOGY CO., LTD.', '00401B': 'PRINTER SYSTEMS CORP.', '0040A3': 'MICROUNITY SYSTEMS ENGINEERING', '0040B3': 'ParTech Inc.', '00401D': 'INVISIBLE SOFTWARE, INC.', '00407E': 'EVERGREEN SYSTEMS, INC.', '00403E': 'RASTER OPS CORPORATION', '0040F7': 'Polaroid Corporation', '004037': 'SEA-ILAN, INC.', '004046': 'UDC RESEARCH LIMITED', '00404A': 'WEST AUSTRALIAN DEPARTMENT', '0040F9': 'COMBINET', '00C06F': 'KOMATSU LTD.', '00C0A7': 'SEEL LTD.', '00C04A': 'GROUP 2000 AG', '004060': 'COMENDEC LTD', '004056': 'MCM JAPAN LTD.', '004067': 'OMNIBYTE CORPORATION', '004054': 'CONNECTION MACHINES SERVICES', '004004': 'ICM CO. LTD.', '004058': 'KRONOS, INC.', '004018': 'ADOBE SYSTEMS, INC.', '004030': 'GK COMPUTER', '004040': 'RING ACCESS, INC.', '008057': 'ADSOFT, LTD.', '0080BB': 'HUGHES LAN SYSTEMS', '0040D0': 'MITAC INTERNATIONAL CORP.', '0040AB': 'ROLAND DG CORPORATION', '0040B6': 'COMPUTERM CORPORATION', '00C0D7': 'TAIWAN TRADING CENTER DBA', '0040DA': 'TELSPEC LTD', '0040A6': 'Cray, Inc.', '00403D': 'Teradata Corporation', '0040C3': 'FISCHER AND PORTER CO.', '0040EC': 'MIKASA SYSTEM ENGINEERING', '00401A': 'FUJI ELECTRIC CO., LTD.', '004025': 'MOLECULAR DYNAMICS', '0040C7': 'RUBY TECH CORPORATION', '004052': 'STAR TECHNOLOGIES, INC.', '00405F': 'AFE COMPUTERS LTD.', '004080': 'ATHENIX CORPORATION', '00402E': 'PRECISION SOFTWARE, INC.', '00402B': 'TRIGEM COMPUTER, INC.', '0040EE': 'OPTIMEM', '004051': 'Garbee and Garbee', '004049': 'Roche Diagnostics International Ltd.', '004029': 'Compex', '00409E': 'CONCURRENT TECHNOLOGIES LTD.', '00407A': "SOCIETE D'EXPLOITATION DU CNIT", '004031': 'KOKUSAI ELECTRIC CO., LTD', '0040D3': 'KIMPSION INTERNATIONAL CORP.', '004001': 'Zero One Technology Co. Ltd.', '004071': 'ATM COMPUTER GMBH', '004002': 'PERLE SYSTEMS LIMITED', '008054': 'FRONTIER TECHNOLOGIES CORP.', '008053': 'INTELLICOM, INC.', '008026': 'NETWORK PRODUCTS CORPORATION', '0080B0': 'ADVANCED INFORMATION', '0080FA': 'RWT GMBH', '0080FD': 'EXSCEED CORPRATION', '0080FE': 'AZURE TECHNOLOGIES, INC.', '0080F1': 'OPUS SYSTEMS', '008029': 'EAGLE TECHNOLOGY, INC.', '008072': 'MICROPLEX SYSTEMS LTD.', '00802F': 'NATIONAL INSTRUMENTS CORP.', '00800E': 'ATLANTIX CORPORATION', '0080AB': 'DUKANE NETWORK INTEGRATION', '00803C': 'TVS ELECTRONICS LTD', '008046': 'Tattile SRL ', '0080EC': 'SUPERCOMPUTING SOLUTIONS, INC.', '0080AD': 'CNET TECHNOLOGY, INC.', '008016': 'WANDEL AND GOLTERMANN', '0080A2': 'CREATIVE ELECTRONIC SYSTEMS', '00804B': 'EAGLE TECHNOLOGIES PTY.LTD.', '008011': "DIGITAL SYSTEMS INT'L. 
INC.", '0080A5': 'SPEED INTERNATIONAL', '008079': 'MICROBUS DESIGNS LTD.', '0080C8': 'D-LINK SYSTEMS, INC.', '008027': 'ADAPTIVE SYSTEMS, INC.', '0080FC': 'AVATAR CORPORATION', '0080E4': 'NORTHWEST DIGITAL SYSTEMS, INC', '00802C': 'THE SAGE GROUP PLC', '0080D6': 'NUVOTECH, INC.', '00800A': 'JAPAN COMPUTER CORP.', '008012': 'INTEGRATED MEASUREMENT SYSTEMS', '008034': 'SMT GOUPIL', '0080CC': 'MICROWAVE BYPASS SYSTEMS', '0080F6': 'SYNERGY MICROSYSTEMS', '000039': 'TOSHIBA CORPORATION', '00003C': 'AUSPEX SYSTEMS INC.', '00007E': 'CLUSTRIX CORPORATION', '0000CB': 'COMPU-SHACK ELECTRONIC GMBH', '0000A5': 'Tattile SRL ', '000036': 'ATARI CORPORATION', '0000F8': 'DIGITAL EQUIPMENT CORPORATION', '00807B': 'ARTEL COMMUNICATIONS CORP.', '00805C': 'AGILIS CORPORATION', '0080C5': 'NOVELLCO DE MEXICO', '008014': 'ESPRIT SYSTEMS', '000075': 'Nortel Networks', '0000ED': 'APRIL', '0000A3': 'NETWORK APPLICATION TECHNOLOGY', '000079': 'NETWORTH INCORPORATED', '000091': 'ANRITSU CORPORATION', '008078': 'PRACTICAL PERIPHERALS, INC.', '000044': 'CASTELLE CORPORATION', '00001A': 'ADVANCED MICRO DEVICES', '0080B7': 'STELLAR COMPUTER', '00007D': 'Oracle Corporation', '000096': 'MARCONI ELECTRONICS LTD.', '00005E': 'ICANN, IANA Department', '000038': 'CSS LABS', '000095': 'SONY TEKTRONIX CORP.', '000057': 'SCITEX CORPORATION LTD.', '0000D6': 'PUNCH LINE HOLDING', '0000CE': 'MEGADATA CORP.', '00007B': 'RESEARCH MACHINES', '000013': 'CAMEX', '00009E': 'MARLI S.A.', '000042': 'METIER MANAGEMENT SYSTEMS LTD.', '000060': 'KONTRON ELEKTRONIK GMBH', '08006F': 'PHILIPS APELDOORN B.V.', '00006A': 'COMPUTER CONSOLES INC.', '00000F': 'NEXT, INC.', '0000BB': 'TRI-DATA', '00007F': 'LINOTYPE-HELL AG', '08007A': 'INDATA', '080079': 'THE DROID WORKS', '080073': 'TECMAR INC.', '080062': 'General Dynamics', '08005C': 'FOUR PHASE SYSTEMS', '08005A': 'IBM Corp', '080072': 'XEROX CORP UNIV GRANT PROGRAM', '08006A': 'AT&T', '000040': 'APPLICON, INC.', '00005D': 'CS TELECOM', '000085': 'CANON INC.', '00004A': 'ADC CODENOLL TECHNOLOGY CORP.', '000012': 'INFORMATION TECHNOLOGY LIMITED', '00008A': 'DATAHOUSE INFORMATION SYSTEMS', '000032': 'Marconi plc', '08008F': 'CHIPCOM CORPORATION', '080019': 'GENERAL ELECTRIC CORPORATION', '027001': 'RACAL-DATACOM', '080040': 'FERRANTI COMPUTER SYS. 
LIMITED', '08003A': 'ORCATECH INC.', '08003D': 'CADNETIX CORPORATIONS', '080038': 'BULL S.A.S.', '08002F': 'PRIME COMPUTER INC.', '08003E': 'CODEX CORPORATION', '08002C': 'BRITTON LEE INC.', '08004D': 'CORVUS SYSTEMS INC.', '08001E': 'APOLLO COMPUTER INC.', '080052': 'INSYSTEC', '080001': 'COMPUTERVISION CORPORATION', '080005': 'SYMBOLICS INC.', '00DD07': 'UNGERMANN-BASS INC.', '000008': 'XEROX CORPORATION', '00003D': 'UNISYS', '00DD0D': 'UNGERMANN-BASS INC.', '080064': 'Sitasys AG', '080002': 'BRIDGE COMMUNICATIONS INC.', '08001A': 'TIARA/ 10NET', '08008B': 'PYRAMID TECHNOLOGY CORP.', '080012': 'BELL ATLANTIC INTEGRATED SYST.', '00000B': 'MATRIX CORPORATION', '00009B': 'INFORMATION INTERNATIONAL, INC', '08000E': 'NCR CORPORATION', '00DD09': 'UNGERMANN-BASS INC.', '000002': 'XEROX CORPORATION', '000003': 'XEROX CORPORATION', '080016': 'BARRISTER INFO SYS CORP', '000006': 'XEROX CORPORATION', '14A1BF': 'ASSA ABLOY Korea Co., Ltd Unilock', '9483C4': 'GL Technologies (Hong Kong) Limited', '9C93B0': 'Megatronix (Beijing) Technology Co., Ltd.', '984827': 'TP-LINK TECHNOLOGIES CO.,LTD.', '64AEF1': 'Qingdao Hisense Electronics Co.,Ltd.', '90B832': 'Aerohive Networks Inc.', '28BD89': 'Google, Inc.', 'D4F5EF': 'Hewlett Packard Enterprise', '44CB8B': 'LG Innotek', 'B4055D': 'Inspur Electronic Information Industry Co.,Ltd.', 'D8A315': 'vivo Mobile Communication Co., Ltd.', '70F82B': 'DWnet Technologies(Suzhou) Corporation', '24FD0D': 'INDÚSTRIA DE TELECOMUNICAÇÃO ELETRÔNICA', 'EC1BBD': 'Silicon Laboratories', '64F9C0': 'ANALOG DEVICES', '80647A': 'Ola Sense Inc', 'C0B883': 'Intel Corporate', '34ED1B': 'Cisco Systems, Inc', '142475': '4DReplay, Inc', 'BCA511': 'NETGEAR', '10DCB6': 'IEEE Registration Authority', 'F89E28': 'Cisco Meraki', 'F8C4F3': 'Shanghai Infinity Wireless Technologies Co.,Ltd.', '18D0C5': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D4772B': 'Nanjing Ztlink Network Technology Co.,Ltd', 'C4E90A': 'D-Link International', 'C02E26': 'Private', '000A7B': 'Cornelius Consult', '30E98E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C4447D': 'HUAWEI TECHNOLOGIES CO.,LTD', 'ACBD70': 'Huawei Device Co., Ltd.', 'C809A8': 'Intel Corporate', '748B34': 'Shanghai Smart System Technology Co., Ltd', '34D262': 'SZ DJI TECHNOLOGY CO.,LTD', '0812A5': 'Amazon Technologies Inc.', '807FF8': 'Juniper Networks', '440377': 'IEEE Registration Authority', '002487': 'Transact Campus, Inc.', '38E8EE': 'Nanjing Youkuo Electric Technology Co., Ltd', '80E455': 'New H3C Technologies Co., Ltd', '4CBC72': 'Primex Wireless', 'B4E9A3': 'port industrial automation GmbH', '90B8E0': 'SHENZHEN YANRAY TECHNOLOGY CO.,LTD', '9CF531': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '2C4CC6': 'Murata Manufacturing Co., Ltd.', '7C210D': 'Cisco Systems, Inc', '6802B8': 'Compal Broadband Networks, Inc.', '3463D4': 'BIONIX SUPPLYCHAIN TECHNOLOGIES SLU', '08F7E9': 'HRCP Research and Development Partnership', '7C8AE1': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. 
', '54E4A9': 'BHR Tech GmbH', '208058': 'Ciena Corporation', '684AAE': 'HUAWEI TECHNOLOGIES CO.,LTD', '60D755': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A445CD': 'IoT Diagnostics', '946269': 'ARRIS Group, Inc.', 'D0DD49': 'Juniper Networks', 'C863FC': 'ARRIS Group, Inc.', '1CCCD6': 'Xiaomi Communications Co Ltd', 'A8C252': 'Huawei Device Co., Ltd.', 'A04147': 'Huawei Device Co., Ltd.', '1CAECB': 'HUAWEI TECHNOLOGIES CO.,LTD', '4CF19E': 'Groupe Atlantic', '04ED33': 'Intel Corporate', '2036D7': 'Shanghai Reacheng Communication Technology Co.,Ltd', '68070A': 'TPVision Europe B.V', '4CEBBD': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '7CC926': 'Wuhan GreeNet Information Service Co.,Ltd.', '5C75AF': 'Fitbit, Inc.', '38BAB0': 'Broadcom', '70879E': 'Beken Corporation', 'B45062': 'EmBestor Technology Inc.', '044A6C': 'HUAWEI TECHNOLOGIES CO.,LTD', '38FB14': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F0E4A2': 'HUAWEI TECHNOLOGIES CO.,LTD', '7C5189': 'SG Wireless Limited', '7CB27D': 'Intel Corporate', '1063C8': 'Liteon Technology Corporation', '582059': 'Xiaomi Communications Co Ltd', '90272B': 'Algorab S.r.l.', '4CBCB4': 'ABB SpA - DIN Rail', '94D505': 'Fiberhome Telecommunication Technologies Co.,LTD', '74E1B6': 'Apple, Inc.', '24A52C': 'HUAWEI TECHNOLOGIES CO.,LTD', '482759': 'Levven Electronics Ltd.', 'AC7713': 'Honeywell Safety Products (Shanghai) Co.,Ltd', '08849D': 'Amazon Technologies Inc.', '90BDE6': 'Quectel Wireless Solutions Co., Ltd.', '18A4A9': 'Vanu Inc.', '80E82C': 'Hewlett Packard', 'D4ADBD': 'Cisco Systems, Inc', '2440AE': 'NIIC Technology Co., Ltd.', 'F40E01': 'Apple, Inc.', '1495CE': 'Apple, Inc.', '50DE06': 'Apple, Inc.', '5CD135': 'Xtreme Power Systems', '7869D4': 'Shenyang Vibrotech Instruments Inc.', '082697': 'Zyxel Communications Corporation', 'CCCCCC': 'Silicon Laboratories', 'CC660A': 'Apple, Inc.', 'FC1D43': 'Apple, Inc.', '64CB9F': 'TECNO MOBILE LIMITED', '4CFBFE': 'Sercomm Japan Corporation', 'C0CBF1': 'Mobiwire Mobiles (NingBo) Co., LTD', 'FC7D6C': 'HYESUNG TECHWIN Co., Ltd', 'E47E9A': 'zte corporation', '2C16BD': 'IEEE Registration Authority', '30A889': 'DECIMATOR DESIGN', 'B4A2EB': 'IEEE Registration Authority', '1CD5E2': 'Shenzhen YOUHUA Technology Co., Ltd', '0024E9': 'Samsung Electronics Co.,Ltd', '683B78': 'Cisco Systems, Inc', 'F860F0': 'Aruba, a Hewlett Packard Enterprise Company', '5CA1E0': 'EmbedWay Technologies', '84D412': 'Palo Alto Networks', '68AB09': 'Nokia', 'F4CE36': 'Nordic Semiconductor ASA', 'B46077': 'Sichuan Changhong Electric Ltd.', '00F620': 'Google, Inc.', 'F43328': 'CIMCON Lighting Inc.', '7C942A': 'HUAWEI TECHNOLOGIES CO.,LTD', '1CB796': 'HUAWEI TECHNOLOGIES CO.,LTD', '3847BC': 'HUAWEI TECHNOLOGIES CO.,LTD', '549209': 'HUAWEI TECHNOLOGIES CO.,LTD', '745909': 'HUAWEI TECHNOLOGIES CO.,LTD', '5C5AC7': 'Cisco Systems, Inc', '3CB74B': 'Technicolor CH USA Inc.', '00EDB8': 'KYOCERA Corporation ', '9C99CD': 'Voippartners', 'C4C603': 'Cisco Systems, Inc', 'BCA13A': 'SES-imagotag', '2823F5': 'China Mobile (Hangzhou) Information Technology Co., Ltd.', 'F010AB': 'China Mobile (Hangzhou) Information Technology Co., Ltd.', 'B4DC09': 'Guangzhou Dawei Communication Co.,Ltd', '98865D': 'Nokia Shanghai Bell Co., Ltd.', '7CB59B': 'TP-LINK TECHNOLOGIES CO.,LTD.', '2C4F52': 'Cisco Systems, Inc', '68A03E': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B8C385': 'HUAWEI TECHNOLOGIES CO.,LTD', '4CE9E4': 'New H3C Technologies Co., Ltd', 'ACDB48': 'ARRIS Group, Inc.', 'C80D32': 'Holoplot GmbH', 'D05794': 'Sagemcom Broadband SAS', '04D9F5': 'ASUSTek COMPUTER INC.', 'B891C9': 'Handreamnet', 'C8A776': 
'HUAWEI TECHNOLOGIES CO.,LTD', 'A400E2': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B4C4FC': 'Xiaomi Communications Co Ltd', 'C8D69D': 'Arab International Optronics', '405BD8': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', '54EC2F': 'Ruckus Wireless', '2899C7': 'LINDSAY BROADBAND INC', 'FCBD67': 'Arista Networks', '00257E': 'NEW POS TECHNOLOGY LIMITED', '487746': 'Calix Inc.', 'F8AE27': 'John Deere Electronic Solutions', '7445CE': 'CRESYN', 'C4F7D5': 'Cisco Systems, Inc', '1C6499': 'Comtrend Corporation', '686DBC': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '10DC4A': 'Fiberhome Telecommunication Technologies Co.,LTD', '88EF16': 'ARRIS Group, Inc.', '8CA96F': 'D&M Holdings Inc.', '7CD661': 'Xiaomi Communications Co Ltd', 'B0FD0B': 'IEEE Registration Authority', '98B8BA': 'LG Electronics (Mobile Communications)', '0CE99A': 'ATLS ALTEC', '4C11AE': 'Espressif Inc.', '8C89FA': 'Zhejiang Hechuan Technology Co., Ltd.', '4CBC48': 'Cisco Systems, Inc', '80D04A': 'Technicolor CH USA Inc.', '48D875': 'China TransInfo Technology Co., Ltd', 'D4789B': 'Cisco Systems, Inc', '483FE9': 'HUAWEI TECHNOLOGIES CO.,LTD', '143CC3': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A8E544': 'HUAWEI TECHNOLOGIES CO.,LTD', '1820D5': 'ARRIS Group, Inc.', '3050FD': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '0040BC': 'ALGORITHMICS LTD.', '004065': 'GTE SPACENET', '88E64B': 'Juniper Networks', 'D8D090': 'Dell Inc.', '50C4DD': 'BUFFALO.INC', '0084ED': 'Private', 'E002A5': 'ABB Robotics', 'F42E7F': 'Aruba, a Hewlett Packard Enterprise Company', 'B4CC04': 'Piranti', 'B8D526': 'Zyxel Communications Corporation', 'F0B968': 'ITEL MOBILE LIMITED', '04E56E': 'THUB Co., ltd.', '1C7F2C': 'HUAWEI TECHNOLOGIES CO.,LTD', '88BCC1': 'HUAWEI TECHNOLOGIES CO.,LTD', '1CBFCE': 'Shenzhen Century Xinyang Technology Co., Ltd', 'F83002': 'Texas Instruments', 'A8A159': 'ASRock Incorporation', 'ECADE0': 'D-Link International', '78DA07': 'Zhejiang Tmall Technology Co., Ltd.', '44A61E': 'INGRAM MICRO SERVICES', '38D2CA': 'Zhejiang Tmall Technology Co., Ltd.', '109E3A': 'Zhejiang Tmall Technology Co., Ltd.', '904DC3': 'Flonidan A/S', '000DF1': 'IONIX INC.', '00077C': 'Westermo Network Technologies AB', '8C426D': 'HUAWEI TECHNOLOGIES CO.,LTD', '90F891': 'Kaonmedia CO., LTD.', '445D5E': 'SHENZHEN Coolkit Technology CO.,LTD', '50AD71': 'Tessolve Semiconductor Private Limited', '202AC5': 'Petite-En', 'A8BF3C': 'HDV Phoelectron Technology Limited', 'D4F527': 'SIEMENS AG', 'B8B2F8': 'Apple, Inc.', '98460A': 'Apple, Inc.', 'B85D0A': 'Apple, Inc.', '7C9A1D': 'Apple, Inc.', '103025': 'Apple, Inc.', '70ACD7': 'Shenzhen YOUHUA Technology Co., Ltd', '5462E2': 'Apple, Inc.', '149D99': 'Apple, Inc.', '147BAC': 'Nokia', '906D05': 'BXB ELECTRONICS CO., LTD', 'D4BBC8': 'vivo Mobile Communication Co., Ltd.', '489507': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '24BF74': 'Private', 'CCDC55': 'Dragonchip Limited', 'A4C3F0': 'Intel Corporate', '28FFB2': 'Toshiba Corp.', '1C60D2': 'Fiberhome Telecommunication Technologies Co.,LTD', 'F4B5AA': 'zte corporation', 'E8ACAD': 'zte corporation', '0836C9': 'NETGEAR', '745BC5': 'IEEE Registration Authority', '9440C9': 'Hewlett Packard Enterprise', 'A041A7': 'NL Ministry of Defense', 'E446DA': 'Xiaomi Communications Co Ltd', '1C12B0': 'Amazon Technologies Inc.', '4CC8A1': 'Cisco Meraki', '4CBC98': 'IEEE Registration Authority', '2CF432': 'Espressif Inc.', '647366': 'Shenzhen Siera Technology Ltd', '041EFA': 'BISSELL Homecare, Inc.', 'D85575': 'Samsung Electronics Co.,Ltd', 'D411A3': 'Samsung Electronics Co.,Ltd', '04BA8D': 'Samsung 
Electronics Co.,Ltd', '744D28': 'Routerboard.com', 'A41162': 'Arlo Technology', '00A085': 'Private', 'E05A9F': 'IEEE Registration Authority', '8C5AF8': 'Beijing Xiaomi Electronics Co., Ltd.', 'D45800': 'Fiberhome Telecommunication Technologies Co.,LTD', '90842B': 'LEGO System A/S', '00267E': 'PARROT SA', '2C557C': 'Shenzhen YOUHUA Technology Co., Ltd', 'F4BCDA': 'Shenzhen Jingxun Software Telecommunication Technology Co.,Ltd', '000915': 'CAS Corp.', '58D9C3': 'Motorola Mobility LLC, a Lenovo Company', '2C73A0': 'Cisco Systems, Inc', '443E07': 'Electrolux', '8485E6': 'Guangdong Asano Technology CO.,Ltd.', '3C8375': 'Microsoft Corporation', '000AA8': 'ePipe Pty. Ltd.', '0029C2': 'Cisco Systems, Inc', '187C0B': 'Ruckus Wireless', 'D47B35': 'NEO Monitors AS', '000878': 'Benchmark Storage Innovations', '6CF37F': 'Aruba, a Hewlett Packard Enterprise Company', '140708': 'Private', '78A7EB': '1MORE', '485D36': 'Verizon ', '20C047': 'Verizon ', '346B46': 'Sagemcom Broadband SAS', '7C604A': 'Avelon', '186472': 'Aruba, a Hewlett Packard Enterprise Company', '84D47E': 'Aruba, a Hewlett Packard Enterprise Company', '24DEC6': 'Aruba, a Hewlett Packard Enterprise Company', '0017A0': 'RoboTech srl', '103D0A': 'Hui Zhou Gaoshengda Technology Co.,LTD', '942790': 'TCT mobile ltd', 'A41791': 'Shenzhen Decnta Technology Co.,LTD.', '34DAB7': 'zte corporation', '109C70': 'Prusa Research s.r.o.', 'E84C56': 'INTERCEPT SERVICES LIMITED', 'A41908': 'Fiberhome Telecommunication Technologies Co.,LTD', '38AFD0': 'Private', '80D336': 'CERN', '64255E': 'Observint Technologies, Inc.', '90940A': 'Analog Devices, Inc', '40B076': 'ASUSTek COMPUTER INC.', 'D43D39': 'Dialog Semiconductor', '0014A5': 'Gemtek Technology Co., Ltd.', 'C0B5D7': 'CHONGQING FUGUI ELECTRONICS CO.,LTD.', 'D4AD71': 'Cisco Systems, Inc', '702B1D': 'E-Domus International Limited', 'F085C1': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', '60380E': 'ALPS ELECTRIC CO., LTD.', 'FC62B9': 'ALPS ELECTRIC CO., LTD.', '0002C7': 'ALPS ELECTRIC CO., LTD.', '001E3D': 'ALPS ELECTRIC CO., LTD.', '28A183': 'ALPS ELECTRIC CO., LTD.', '4CDD7D': 'LHP Telematics LLC', '48F07B': 'ALPS ELECTRIC CO., LTD.', 'B8BC5B': 'Samsung Electronics Co.,Ltd', '108EBA': 'Molekule', '4C218C': 'Panasonic India Private limited', '2C4E7D': 'Chunghua Intelligent Network Equipment Inc.', 'A4F465': 'ITEL MOBILE LIMITED', '4C917A': 'IEEE Registration Authority', 'F48CEB': 'D-Link International', '743A65': 'NEC Corporation', '00255C': 'NEC Corporation', '684F64': 'Dell Inc.', 'CC70ED': 'Cisco Systems, Inc', '907E30': 'LARS', '84EB3E': 'Vivint Smart Home', '00A0D5': 'Sierra Wireless', '18BB26': 'FN-LINK TECHNOLOGY LIMITED', '18B905': 'Hong Kong Bouffalo Lab Limited', 'ECF0FE': 'zte corporation', '94A40C': 'Diehl Metering GmbH', '70B317': 'Cisco Systems, Inc', 'B00247': 'AMPAK Technology, Inc.', 'BCE796': 'Wireless CCTV Ltd', '948FCF': 'ARRIS Group, Inc.', 'A8F5DD': 'ARRIS Group, Inc.', '44D3AD': 'Shenzhen TINNO Mobile Technology Corp.', '9C8275': 'Yichip\xa0Microelectronics (Hangzhou) Co.,Ltd', '5CCBCA': 'FUJIAN STAR-NET COMMUNICATION CO.,LTD', '28E98E': 'Mitsubishi Electric Corporation', '34F8E7': 'Cisco Systems, Inc', 'B0907E': 'Cisco Systems, Inc', '2C7360': 'Earda Technologies co Ltd', '508CF5': 'China Mobile Group Device Co.,Ltd.', '1C549E': 'Universal Electronics, Inc.', 'E4CA12': 'zte corporation', 'D49E05': 'zte corporation', '585FF6': 'zte corporation', '40B30E': 'Integrated Device Technology (Malaysia) Sdn. 
Bhd.', '04CE7E': 'NXP France Semiconductors France', 'C09AD0': 'Apple, Inc.', '94B01F': 'Apple, Inc.', '98CC4D': 'Shenzhen mantunsci co., LTD', 'C01B23': 'SICHUAN TIANYI COMHEART TELECOM CO.,LTD', 'B8C74A': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'D8CE3A': 'Xiaomi Communications Co Ltd', '102C6B': 'AMPAK Technology, Inc.', '7485C4': 'New H3C Technologies Co., Ltd', '94F6D6': 'Apple, Inc.', 'F82D7C': 'Apple, Inc.', 'D0BAE4': 'Shanghai MXCHIP Information Technology Co., Ltd.', '48D35D': 'Private', '80FBF0': 'Quectel Wireless Solutions Co., Ltd.', 'C0132B': 'Sichuan Changhong Electric Ltd.', '0CB4A4': 'Xintai Automobile Intelligent Network Technology', '90633B': 'Samsung Electronics Co.,Ltd', 'FCAAB6': 'Samsung Electronics Co.,Ltd', 'C82E47': 'Suzhou SmartChip Semiconductor Co., LTD', 'C02250': 'Koss Corporation', 'A0B549': 'Arcadyan Corporation', '001F5A': 'Beckwith Electric Co.', '985D82': 'Arista Networks', '2453BF': 'Enernet', '043385': 'Nanchang BlackShark Co.,Ltd.', '84E5D8': 'Guangdong UNIPOE IoT Technology Co.,Ltd.', 'A8BC9C': 'Cloud Light Technology Limited', 'A89042': 'Beijing Wanwei Intelligent Technology Co., Ltd.', '18BE92': 'Delta Networks, Inc.', '90C54A': 'vivo Mobile Communication Co., Ltd.', 'BC7596': 'Beijing Broadwit Technology Co., Ltd.', '1C34DA': 'Mellanox Technologies, Inc.', '2CA02F': 'Veroguard Systems Pty Ltd', '6C5C3D': 'IEEE Registration Authority', '782327': 'Samsung Electronics Co.,Ltd', 'DCF756': 'Samsung Electronics Co.,Ltd', '68A47D': 'Sun Cupid Technology (HK) LTD', '184B0D': 'Ruckus Wireless', 'D41243': 'AMPAK Technology, Inc.', '48A6B8': 'Sonos, Inc.', 'B87826': 'Nintendo Co.,Ltd', '5076AF': 'Intel Corporate', 'DCCBA8': 'Explora Technologies Inc', 'C07878': 'FLEXTRONICS MANUFACTURING(ZHUHAI)CO.,LTD.', 'E4B97A': 'Dell Inc.', '001636': 'QUANTA COMPUTER INC.', '34DAC1': 'SAE Technologies Development(Dongguan) Co., Ltd.', '705DCC': 'EFM Networks', 'D092FA': 'Fiberhome Telecommunication Technologies Co.,LTD', 'E85AD1': 'Fiberhome Telecommunication Technologies Co.,LTD', 'A823FE': 'LG Electronics', 'E05D5C': 'Oy Everon Ab', '688F2E': 'Hitron Technologies. Inc', 'E046E5': 'Gosuncn Technology Group Co., Ltd.', '1C599B': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D4BD1E': '5VT Technologies,Taiwan LTd.', 'BC9B68': 'Technicolor CH USA Inc.', 'CCD4A1': 'MitraStar Technology Corp.', '08BA5F': 'Qingdao Hisense Electronics Co.,Ltd.', '10DFFC': 'Siemens AG', '847F3D': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'C4FDE6': 'DRTECH', 'CC988B': 'SONY Visual Products Inc.', '30E3D6': 'Spotify USA Inc.', '9CA525': 'Shandong USR IOT Technology Limited', '787D53': 'Aerohive Networks Inc.', 'E0456D': 'China Mobile Group Device Co.,Ltd.', '283926': 'CyberTAN Technology Inc.', '8CFCA0': 'Shenzhen Smart Device Technology Co., LTD.', '1C427D': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '806933': 'HUAWEI TECHNOLOGIES CO.,LTD', 'BC26C7': 'Cisco Systems, Inc', 'BC5EA1': 'PsiKick, Inc.', '944F4C': 'Sound United LLC', '981888': 'Cisco Meraki', '18810E': 'Apple, Inc.', '608C4A': 'Apple, Inc.', '74B587': 'Apple, Inc.', 'FCB6D8': 'Apple, Inc.', '3C6A2C': 'IEEE Registration Authority', '241B7A': 'Apple, Inc.', '8CFE57': 'Apple, Inc.', 'C0A600': 'Apple, Inc.', '18E829': 'Ubiquiti Networks Inc.', 'E0C286': 'Aisai Communication Technology Co., Ltd.', '7405A5': 'TP-LINK TECHNOLOGIES CO.,LTD.', '286DCD': 'Beijing Winner Microelectronics Co.,Ltd. 
', '541031': 'SMARTO', '44A466': 'GROUPE LDLC', 'D80D17': 'TP-LINK TECHNOLOGIES CO.,LTD.', '18C2BF': 'BUFFALO.INC', 'E81CBA': 'Fortinet, Inc.', 'F0B014': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '1889A0': 'Wuhan Funshion Online Technologies Co.,Ltd', '0C2A86': 'Fiberhome Telecommunication Technologies Co.,LTD', 'FC61E9': 'Fiberhome Telecommunication Technologies Co.,LTD', '405662': 'GuoTengShengHua Electronics LTD.', 'E4DB6D': 'Beijing Xiaomi Electronics Co., Ltd.', '00A0D1': 'INVENTEC CORPORATION', '0018CC': 'AXIOHM SAS', '001827': 'NEC UNIFIED SOLUTIONS NEDERLAND B.V.', '009004': '3COM EUROPE LTD', '00068C': '3COM', '02608C': '3COM', '00D0D8': '3COM', '18937F': 'AMPAK Technology, Inc.', 'A43523': 'Guangdong Donyan Network Technologies Co.,Ltd.', 'B4A94F': 'MERCURY CORPORATION', '803AF4': 'Fiberhome Telecommunication Technologies Co.,LTD', '48A0F8': 'Fiberhome Telecommunication Technologies Co.,LTD', 'F85E3C': 'SHENZHEN ZHIBOTONG ELECTRONICS CO.,LTD', '283E76': 'Common Networks', 'DC3979': 'Cisco Systems, Inc', '58D56E': 'D-Link International', '0C5331': 'ETH Zurich', 'DC9088': 'HUAWEI TECHNOLOGIES CO.,LTD', '54812D': 'PAX Computer Technology(Shenzhen) Ltd.', '0C9A42': 'FN-LINK TECHNOLOGY LIMITED', '000809': 'Systemonic AG', '8C41F4': 'IPmotion GmbH', '704F08': 'Shenzhen Huisheng Information Technology Co., Ltd.', '8835C1': 'OI ELECTRIC CO.,LTD', '3042A1': 'ilumisys Inc. DBA Toggled', '0026B7': 'Kingston Technology Company, Inc.', '28D0CB': 'Cambridge Communication Systems Ltd', '44657F': 'Calix Inc.', '4062EA': 'China Mobile Group Device Co.,Ltd.', '4C0FC7': 'Earda Technologies co Ltd', '80A796': 'Neurotek LLC', 'CC2119': 'Samsung Electronics Co.,Ltd', '302303': 'Belkin International Inc.', '9CF6DD': 'IEEE Registration Authority', '001E80': 'Icotera A/S', '48881E': 'EthoSwitch LLC', '3C71BF': 'Espressif Inc.', '000393': 'Apple, Inc.', '0000C3': 'Harris Corporation', '304B07': 'Motorola Mobility LLC, a Lenovo Company', '345ABA': 'tcloud intelligence', '502FA8': 'Cisco Systems, Inc', 'B46921': 'Intel Corporate', '902BD2': 'HUAWEI TECHNOLOGIES CO.,LTD', '08D59D': 'Sagemcom Broadband SAS', 'C08359': 'IEEE Registration Authority', 'EC83D5': 'GIRD Systems Inc', '14942F': 'USYS CO.,LTD.', 'FCB10D': 'Shenzhen Tian Kun Technology Co.,LTD.', '20F77C': 'vivo Mobile Communication Co., Ltd.', '001EEC': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', 'F0761C': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. ', '0004AE': 'Sullair Corporation', '00451D': 'Cisco Systems, Inc', 'A0D635': 'WBS Technology', '34800D': 'Cavium Inc', 'B44BD6': 'IEEE Registration Authority', 'D8912A': 'Zyxel Communications Corporation', '3C427E': 'IEEE Registration Authority', '000BA3': 'Siemens AG', '000C8A': 'Bose Corporation', '243A82': 'IRTS', '880907': 'MKT Systemtechnik GmbH & Co. KG', '58A48E': 'PixArt Imaging Inc.', '30D659': 'Merging Technologies SA', '702AD5': 'Samsung Electronics Co.,Ltd', '889765': 'exands', '386E88': 'zte corporation', 'B88584': 'Dell Inc.', '40EEDD': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B01886': 'SmarDTV', 'AC751D': 'HUAWEI TECHNOLOGIES CO.,LTD', '289E97': 'HUAWEI TECHNOLOGIES CO.,LTD', '001525': 'Chamberlain Access Solutions', '001EB0': 'ImesD Electronica S.L.', '641C67': 'DIGIBRAS INDUSTRIA DO BRASILS/A', '60058A': 'Hitachi Metals, Ltd.', 'BC22FB': 'RF Industries', '0080B6': 'Mercury Systems – Trusted Mission Solutions, Inc. 
', '08512E': 'Orion Diagnostica Oy', '98A404': 'Ericsson AB', '00CC3F': 'Universal Electronics, Inc.', '74B91E': 'Nanjing Bestway Automation System Co., Ltd', 'A019B2': 'IEEE Registration Authority', '8C15C7': 'HUAWEI TECHNOLOGIES CO.,LTD', '60FA9D': 'HUAWEI TECHNOLOGIES CO.,LTD', 'DC9914': 'HUAWEI TECHNOLOGIES CO.,LTD', '4C3FD3': 'Texas Instruments', 'B05365': 'China Mobile IOT Company Limited', '308841': 'Sichuan\xa0AI-Link\xa0Technology\xa0Co.,\xa0Ltd.', '44EFCF': 'UGENE SOLUTION inc.', '304596': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C0F4E6': 'HUAWEI TECHNOLOGIES CO.,LTD', '74EB80': 'Samsung Electronics Co.,Ltd', '0CE0DC': 'Samsung Electronics Co.,Ltd', 'D868C3': 'Samsung Electronics Co.,Ltd', 'C493D9': 'Samsung Electronics Co.,Ltd', 'A82BB9': 'Samsung Electronics Co.,Ltd', 'ACFD93': 'WEIFANG GOERTEK ELECTRONICS CO.,LTD', '68572D': 'HANGZHOU AIXIANGJI TECHNOLOGY CO., LTD', '00B8C2': 'Heights Telecom T ltd', 'F4BF80': 'HUAWEI TECHNOLOGIES CO.,LTD', '000E8F': 'Sercomm Corporation.', 'A0E617': 'MATIS', '7001B5': 'Cisco Systems, Inc', '001F49': 'Manhattan TV Ltd', '88D652': 'AMERGINT Technologies', 'FC90FA': 'Independent Technologies', 'D0B214': 'PoeWit Inc', 'C42456': 'Palo Alto Networks', 'B4B686': 'Hewlett Packard', '4CEDFB': 'ASUSTek COMPUTER INC.', '7C2EBD': 'Google, Inc.', '6CAF15': 'Webasto SE', 'E4E130': 'TCT mobile ltd', '0C2138': 'Hengstler GmbH', 'E46059': 'Pingtek Co., Ltd.', 'E0191D': 'HUAWEI TECHNOLOGIES CO.,LTD', '68D1BA': 'Shenzhen YOUHUA Technology Co., Ltd', '1C1AC0': 'Apple, Inc.', '3078C2': 'Innowireless / QUCELL Networks', '4050B5': 'Shenzhen New Species Technology Co., Ltd.', '3C15FB': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CC934A': 'Sierra Wireless', '00CFC0': 'China Mobile Group Device Co.,Ltd.', 'DC330D': 'QING DAO HAIER TELECOM CO.,LTD.', '688975': 'nuoxc', '40F04E': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '0021F2': 'EASY3CALL Technology Limited', '0015C4': 'FLOVEL CO., LTD.', '0011E6': 'Scientific Atlanta', '24F128': 'Telstra', 'E0383F': 'zte corporation', 'D47226': 'zte corporation', '40831D': 'Apple, Inc.', 'DCD3A2': 'Apple, Inc.', '5C1DD9': 'Apple, Inc.', '800588': 'Ruijie Networks Co.,LTD', 'F00E1D': 'Megafone Limited', '88AE07': 'Apple, Inc.', '68FEF7': 'Apple, Inc.', 'AC35EE': 'FN-LINK TECHNOLOGY LIMITED', '881196': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E40EEE': 'HUAWEI TECHNOLOGIES CO.,LTD', '28D997': 'Yuduan Mobile Co., Ltd.', '301F9A': 'IEEE Registration Authority', '0C2C54': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D4C19E': 'Ruckus Wireless', '70695A': 'Cisco Systems, Inc', '00BF77': 'Cisco Systems, Inc', 'D07714': 'Motorola Mobility LLC, a Lenovo Company', '30B7D4': 'Hitron Technologies. Inc', 'B481BF': 'Meta-Networks, LLC', '946AB0': 'Arcadyan Corporation', '4818FA': 'Nocsys', '587A6A': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'A038F8': 'OURA Health Oy', '687924': 'ELS-GmbH & Co. 
KG', '28FD80': 'IEEE Registration Authority', '0CAE7D': 'Texas Instruments', '304511': 'Texas Instruments', 'E81AAC': 'ORFEO SOUNDWORKS Inc.', '000758': 'DragonWave Inc.', 'F0FCC8': 'ARRIS Group, Inc.', 'F8DF15': 'Sunitec Enterprise Co.,Ltd', '001DB5': 'Juniper Networks', 'B02680': 'Cisco Systems, Inc', 'D49398': 'Nokia Corporation', '001937': 'CommerceGuard AB', 'FC7C02': 'Phicomm (Shanghai) Co., Ltd.', 'A8610A': 'ARDUINO AG', '6097DD': 'MicroSys Electronics GmbH', '047970': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A057E3': 'HUAWEI TECHNOLOGIES CO.,LTD', '1CB044': 'ASKEY COMPUTER CORP', 'AC17C8': 'Cisco Meraki', 'F4844C': 'Texas Instruments', '38DEAD': 'Intel Corporate', 'D46D6D': 'Intel Corporate', 'B4F2E8': 'ARRIS Group, Inc.', '3C574F': 'China Mobile Group Device Co.,Ltd.', 'D49CF4': 'Palo Alto Networks', '8C1645': 'LCFC(HeFei) Electronics Technology co., ltd', '689861': 'Beacon Inc', '609813': 'Shanghai Visking Digital Technology Co. LTD', '506B4B': 'Mellanox Technologies, Inc.', 'B41C30': 'zte corporation', '705AAC': 'Samsung Electronics Co.,Ltd', '2C9569': 'ARRIS Group, Inc.', 'A039EE': 'Sagemcom Broadband SAS', 'E4CB59': 'Beijing Loveair Science and Technology Co. Ltd.', 'B4E62D': 'Espressif Inc.', '847460': 'zte corporation', '4C82CF': 'Dish Technologies Corp', '285767': 'Dish Technologies Corp', '70169F': 'EtherCAT Technology Group', '68EF43': 'Apple, Inc.', 'D02B20': 'Apple, Inc.', '2C61F6': 'Apple, Inc.', 'D4A33D': 'Apple, Inc.', 'F0766F': 'Apple, Inc.', '4098AD': 'Apple, Inc.', '6C4D73': 'Apple, Inc.', '1CA0B8': 'Hon Hai Precision Ind. Co., Ltd.', 'D88466': 'Extreme Networks, Inc.', '000496': 'Extreme Networks, Inc.', '00E02B': 'Extreme Networks, Inc.', '5C0E8B': 'Extreme Networks, Inc.', '7467F7': 'Extreme Networks, Inc.', 'E43022': 'Hanwha Techwin Security Vietnam', '044F17': 'HUMAX Co., Ltd.', '5C4A1F': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', 'E48F34': 'Vodafone Italia S.p.A.', '685ACF': 'Samsung Electronics Co.,Ltd', '0CA8A7': 'Samsung Electronics Co.,Ltd', 'B0672F': 'Bowers & Wilkins', '10CD6E': 'FISYS', 'D86375': 'Xiaomi Communications Co Ltd', 'D89C67': 'Hon Hai Precision Ind. Co.,Ltd.', '64209F': 'Tilgin AB', 'A43E51': 'ANOV FRANCE', '702605': 'SONY Visual Products Inc.', '0090F1': 'Seagate Cloud Systems Inc', '845A81': 'ffly4u', 'CC81DA': 'Phicomm (Shanghai) Co., Ltd.', '00806C': 'Secure Systems & Services', '007263': 'Netcore Technology Inc.', '1C27DD': 'Datang Gohighsec(zhejiang)Information Technology Co.,Ltd.', 'B8C8EB': 'ITEL MOBILE LIMITED', '80C5F2': 'AzureWave Technology Inc.', '64F88A': 'China Mobile IOT Company Limited', '68DB54': 'Phicomm (Shanghai) Co., Ltd.', 'B45253': 'Seagate Technology', '0011C6': 'Seagate Technology', '001D38': 'Seagate Technology', '005013': 'Seagate Cloud Systems Inc', 'C8DF84': 'Texas Instruments', '240D6C': 'SMND', '48555C': 'Wu Qi Technologies,Inc.', '18F0E4': 'Xiaomi Communications Co Ltd', '588A5A': 'Dell Inc.', '9C8C6E': 'Samsung Electronics Co.,Ltd', 'DC4F22': 'Espressif Inc.', 'F86CE1': 'Taicang T&W Electronics', '1C7328': 'Connected Home', 'D8E004': 'Vodia Networks Inc', '2CFDAB': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '30B4B8': 'LG Electronics', '380E4D': 'Cisco Systems, Inc', '3873EA': 'IEEE Registration Authority', '4C5262': 'Fujitsu Technology Solutions GmbH', '803BF6': 'LOOK EASY INTERNATIONAL LIMITED', '30EB1F': 'Skylab M&C Technology Co.,Ltd', '549A4C': 'GUANGDONG HOMECARE TECHNOLOGY CO.,LTD. 
', 'EC1D8B': 'Cisco Systems, Inc', 'EC7097': 'ARRIS Group, Inc.', '5819F8': 'ARRIS Group, Inc.', 'D07FC4': 'Ou Wei Technology Co.,Ltd. of Shenzhen City', '1479F3': 'China Mobile Group Device Co.,Ltd.', '0CCEF6': 'Guizhou Fortuneship Technology Co., Ltd', '1806FF': 'Acer Computer(Shanghai) Limited.', 'C4CD82': 'Hangzhou Lowan Information Technology Co., Ltd.', '30FB94': 'Shanghai Fangzhiwei Information Technology CO.,Ltd.', '0023A0': 'Hana CNS Co., LTD.', 'F406A5': 'Hangzhou Bianfeng Networking Technology Co., Ltd.', 'A4B52E': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '3CA581': 'vivo Mobile Communication Co., Ltd.', '34E911': 'vivo Mobile Communication Co., Ltd.', '64CBA3': 'Pointmobile', 'ECFABC': 'Espressif Inc.', '08BA22': 'Swaive Corporation', '28C13C': 'Hon Hai Precision Ind. Co., Ltd.', 'B0ECE1': 'Private', '60E78A': 'UNISEM', 'F8F21E': 'Intel Corporate', '282986': 'APC by Schneider Electric', '707DB9': 'Cisco Systems, Inc', '08BEAC': 'Edimax Technology Co. Ltd.', '248BE0': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '002424': 'Ace Axis Limited', '50C9A0': 'SKIPPER AS', '7483EF': 'Arista Networks', '00E0F6': 'DECISION EUROPE', 'CC2DE0': 'Routerboard.com', '00BF61': 'Samsung Electronics Co.,Ltd', '7867D7': 'Apple, Inc.', 'B8C111': 'Apple, Inc.', '1046B4': 'FormericaOE', '9CE33F': 'Apple, Inc.', '386B1C': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', 'DC5583': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '001248': 'Dell EMC', '006048': 'Dell EMC', '7CC95A': 'Dell EMC', 'D00401': 'Motorola Mobility LLC, a Lenovo Company', '742857': 'Mayfield Robotics', '589043': 'Sagemcom Broadband SAS', '2856C1': 'Harman International', 'B4A382': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '9C9C40': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', 'A407B6': 'Samsung Electronics Co.,Ltd', '40498A': 'Synapticon GmbH', '389D92': 'Seiko Epson Corporation', '24E124': 'Xiamen Ursaconn Technology Co. , Ltd.', '8C0F83': 'Angie Hospitality LLC', 'DC68EB': 'Nintendo Co.,Ltd', 'E8361D': 'Sense Labs, Inc.', '087808': 'Samsung Electronics Co.,Ltd', '887598': 'Samsung Electronics Co.,Ltd', 'C0174D': 'Samsung Electronics Co.,Ltd', '20F19E': 'ARRIS Group, Inc.', 'C89F42': 'VDII Innovation AB', '7091F3': 'Universal Electronics, Inc.', '080069': 'Silicon Graphics', '002291': 'Cisco Systems, Inc', '10FCB6': 'mirusystems CO.,LTD', '04D6AA': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '50A83A': 'S Mobile Devices Limited', '6405E9': 'Shenzhen WayOS Technology Crop., Ltd.', 'A07099': 'Beijing Huacan Electronics Co., Ltd', '48D6D5': 'Google, Inc.', '0C5842': 'DME Micro', 'B810D4': 'Masimo Corporation', 'BC825D': 'MITSUMI ELECTRIC CO.,LTD.', 'D0666D': 'Shenzhen Bus-Lan Technology Co., Ltd.', '08152F': 'Samsung Electronics Co., Ltd. ARTIK', 'F4F5DB': 'Xiaomi Communications Co Ltd', 'F4E204': 'Traqueur', 'CC2237': 'IEEE Registration Authority', '38D620': 'Limidea Concept Pte. 
Ltd.', '74F91A': 'Onface', 'A434F1': 'Texas Instruments', '8CE38E': 'Toshiba Memory Corporation', '186024': 'Hewlett Packard', 'BC3D85': 'HUAWEI TECHNOLOGIES CO.,LTD', '2054FA': 'HUAWEI TECHNOLOGIES CO.,LTD', '38378B': 'HUAWEI TECHNOLOGIES CO.,LTD', '745C4B': 'GN Audio A/S', '00149D': 'Sound ID Inc.', 'A8E824': 'INIM ELECTRONICS S.R.L.', '104963': 'HARTING K.K.', '8CD48E': 'ITEL MOBILE LIMITED', '642B8A': 'ALL BEST Industrial Co., Ltd.', 'B8EE0E': 'Sagemcom Broadband SAS', 'ECD09F': 'Xiaomi Communications Co Ltd', '78E103': 'Amazon Technologies Inc.', '000659': 'EAL (Apeldoorn) B.V.', '78A6E1': 'Brocade Communications Systems, Inc.', 'E4EC10': 'Nokia Corporation', '002692': 'Mitsubishi Electric Corporation', '8CC121': 'Panasonic Corporation AVC Networks Company', 'EC0441': 'ShenZhen TIGO Semiconductor Co., Ltd.', 'ACBE75': 'Ufine Technologies Co.,Ltd.', '00C08F': 'Panasonic Electric Works Co., Ltd.', 'B0350B': 'MOBIWIRE MOBILES (NINGBO) CO.,LTD', '28A6AC': 'seca gmbh & co. kg', 'A8BE27': 'Apple, Inc.', 'C0A53E': 'Apple, Inc.', '444E6D': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '90B1E0': 'Beijing Nebula Link Technology Co., Ltd', '6C090A': 'GEMATICA SRL', '70E1FD': 'FLEXTRONICS', '74E60F': 'TECNO MOBILE LIMITED', '001AA7': 'Torian Wireless', '0CB459': 'Marketech International Corp.', '8014A8': 'Guangzhou V-SOLUTION Electronic Technology Co., Ltd.', '409BCD': 'D-Link International', '002EC7': 'HUAWEI TECHNOLOGIES CO.,LTD', '488EEF': 'HUAWEI TECHNOLOGIES CO.,LTD', '547595': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'C47154': 'TP-LINK TECHNOLOGIES CO.,LTD.', '586163': 'Quantum Networks (SG) Pte. Ltd.', 'EC3DFD': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '94D029': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '308454': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '5C0339': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F82819': 'Liteon Technology Corporation', '0015E5': 'Cheertek Inc.', '50E971': 'Jibo, Inc.', '30F77F': 'S Mobile Devices Limited', 'D86C63': 'Google, Inc.', '0840F3': 'Tenda Technology Co.,Ltd.Dongguan branch', '94FBB2': 'SHENZHEN GONGJIN ELECTRONICS CO.,LT', '001E99': 'Vantanol Industrial Corporation', '58B633': 'Ruckus Wireless', '5C5181': 'Samsung Electronics Co.,Ltd', '608E08': 'Samsung Electronics Co.,Ltd', '543D37': 'Ruckus Wireless', '2CE6CC': 'Ruckus Wireless', '00227F': 'Ruckus Wireless', '74911A': 'Ruckus Wireless', '00C05D': 'L&N TECHNOLOGIES', '58C583': 'ITEL MOBILE LIMITED', '18204C': 'Kummler+Matter AG', '18D225': 'Fiberhome Telecommunication Technologies Co.,LTD', '18B430': 'Nest Labs Inc.', '30B164': 'Power Electronics International Inc.', 'F88A3C': 'IEEE Registration Authority', 'A40450': 'nFore Technology Inc.', '001B17': 'Palo Alto Networks', '58493B': 'Palo Alto Networks', '786D94': 'Palo Alto Networks', 'FC5A1D': 'Hitron Technologies. Inc', '94147A': 'vivo Mobile Communication Co., Ltd.', '3817E1': 'Technicolor CH USA Inc.', '9828A6': 'COMPAL INFORMATION (KUNSHAN) CO., LTD. 
', '943FC2': 'Hewlett Packard Enterprise', '681DEF': 'Shenzhen CYX Technology Co., Ltd.', 'B40016': ' INGENICO TERMINALS SAS', 'AC203E': 'Wuhan Tianyu Information Industry Co., Ltd.', 'B01F29': 'Helvetia INC.', '880F10': 'Huami Information Technology Co.,Ltd.', '14612F': 'Avaya Inc', '00309D': 'Nimble Microsystems, Inc.', '8C210A': 'TP-LINK TECHNOLOGIES CO.,LTD.', '4C189A': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'CC4B73': 'AMPAK Technology, Inc.', '0015DC': 'KT&C Co., Ltd.', '00187D': 'Armorlink Co .Ltd', 'F430B9': 'Hewlett Packard', '0019F0': 'UNIONMAN TECHNOLOGY CO.,LTD', 'C8DB26': 'Logitech', 'A40E2B': 'Facebook Inc', 'AC4E2E': 'Shenzhen JingHanDa Electronics Co.Ltd', '4C910C': ' Lanix Internacional, S.A. de C.V.', 'A47886': 'Avaya Inc', '0403D6': 'Nintendo Co.,Ltd', '5C1A6F': 'Cambridge Industries(Group) Co.,Ltd.', '3C4CD0': 'CERAGON NETWORKS', 'F40E83': 'ARRIS Group, Inc.', '98F7D7': 'ARRIS Group, Inc.', 'B4BFF6': 'Samsung Electronics Co.,Ltd', '2C3AE8': 'Espressif Inc.', '88BD78': 'Flaircomm Microelectronics,Inc.', '58C5CB': 'Samsung Electronics Co.,Ltd', '206BE7': 'TP-LINK TECHNOLOGIES CO.,LTD.', '182CB4': 'Nectarsoft Co., Ltd.', '54C9DF': 'FN-LINK TECHNOLOGY LIMITED', '74F61C': 'HTC Corporation', 'B8FFB3': 'MitraStar Technology Corp.', 'EC237B': 'zte corporation', 'A0C9A0': 'Murata Manufacturing Co., Ltd.', '982DBA': 'Fibergate Inc.', '84C0EF': 'Samsung Electronics Co.,Ltd', '00A38E': 'Cisco Systems, Inc', 'E0D55E': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', 'A040A0': 'NETGEAR', '000D2B': 'Racal Instruments', '004066': 'APRESIA Systems Ltd', '48A74E': 'zte corporation', 'BC8AE8': 'QING DAO HAIER TELECOM CO.,LTD.', 'F4DE0C': 'ESPOD Ltd.', '3C5282': 'Hewlett Packard', '08ED02': 'IEEE Registration Authority', 'E8FDE8': 'CeLa Link Corporation', '28C63F': 'Intel Corporate', '88CC45': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '600837': 'ivvi Scientific(Nanchang)Co.Ltd', 'EC363F': 'Markov Corporation', '5804CB': 'Tianjin Huisun Technology Co.,Ltd.', '60D7E3': 'IEEE Registration Authority', '1893D7': 'Texas Instruments', 'A8B86E': 'LG Electronics (Mobile Communications)', 'CC90E8': 'Shenzhen YOUHUA Technology Co., Ltd', '7C4F7D': 'Sawwave', '9CAC6D': 'Universal Electronics, Inc.', '08EA40': 'SHENZHEN BILIAN ELECTRONIC CO.,LTD', '00D095': 'Alcatel-Lucent Enterprise', '0020DA': 'Alcatel-Lucent Enterprise', '6C5976': 'Shanghai Tricheer Technology Co.,Ltd.', '7C7B8B': 'Control Concepts, Inc.', '84A9C4': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A0086F': 'HUAWEI TECHNOLOGIES CO.,LTD', '34CE00': 'XIAOMI Electronics,CO.,LTD', 'D06F82': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A0F479': 'HUAWEI TECHNOLOGIES CO.,LTD', '844765': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C4FF1F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A0C4A5': 'SYGN HOUSE CO.,LTD', 'B83765': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '345BBB': 'GD Midea Air-Conditioning Equipment Co.,Ltd.', 'C40BCB': 'Xiaomi Communications Co Ltd', '84AFEC': 'BUFFALO.INC', '5CC6E9': 'Edifier International', '98DDEA': 'Infinix mobility limited', '001D44': 'Krohne', 'A8A198': 'TCT mobile ltd', 'E0C0D1': 'CK Telecom (Shenzhen) Limited', '00219E': 'Sony Mobile Communications Inc', 'ACB57D': 'Liteon Technology Corporation', 'D4619D': 'Apple, Inc.', 'D0498B': 'ZOOM SERVER', '0827CE': 'NAGANO KEIKI CO., LTD.', 'C0D3C0': 'Samsung Electronics Co.,Ltd', '948BC1': 'Samsung Electronics Co.,Ltd', '14568E': 'Samsung Electronics Co.,Ltd', '14BD61': 'Apple, Inc.', '54E061': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '503A7D': 'AlphaTech PLC Int’l Co., Ltd.', 'F4C4D6': 'Shenzhen 
Xinfa Electronic Co.,ltd', '6837E9': 'Amazon Technologies Inc.', '2CA17D': 'ARRIS Group, Inc.', 'D83214': 'Tenda Technology Co.,Ltd.Dongguan branch', '10954B': 'Megabyte Ltd.', 'D8325A': 'Shenzhen YOUHUA Technology Co., Ltd', '9CDA3E': 'Intel Corporate', '283F69': 'Sony Mobile Communications Inc', '002CC8': 'Cisco Systems, Inc', 'C0028D': 'WINSTAR Display CO.,Ltd', 'E89FEC': 'CHENGDU KT ELECTRONIC HI-TECH CO.,LTD', '802689': 'D-Link International', 'F8AB05': 'Sagemcom Broadband SAS', '7C5049': 'Apple, Inc.', 'E47DEB': 'Shanghai Notion Information Technology CO.,LTD.', 'C4B9CD': 'Cisco Systems, Inc', 'EC4F82': 'Calix Inc.', 'D461FE': 'Hangzhou H3C Technologies Co., Limited', '2C4D54': 'ASUSTek COMPUTER INC.', '349672': 'TP-LINK TECHNOLOGIES CO.,LTD.', '64B473': 'Xiaomi Communications Co Ltd', '7451BA': 'Xiaomi Communications Co Ltd', '6CB4A7': 'Landauer, Inc.', '7802F8': 'Xiaomi Communications Co Ltd', '00238A': 'Ciena Corporation', '001081': 'DPS, INC.', '40F385': 'IEEE Registration Authority', '887873': 'Intel Corporate', 'F87588': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F44C7F': 'HUAWEI TECHNOLOGIES CO.,LTD', '28A24B': 'Juniper Networks', '080027': 'PCS Systemtechnik GmbH', 'F8A5C5': 'Cisco Systems, Inc', '7C5A1C': 'Sophos Ltd', 'B0F1EC': 'AMPAK Technology, Inc.', '542B57': 'Night Owl SP', '501E2D': 'StreamUnlimited Engineering GmbH', 'E45D51': 'SFR', 'EC01EE': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '6049C1': 'Avaya Inc', '702084': 'Hon Hai Precision Ind. Co., Ltd.', '9C6650': 'Glodio Technolies Co.,Ltd Tianjin Branch', 'A0A33B': 'HUAWEI TECHNOLOGIES CO.,LTD', '7C67A2': 'Intel Corporate', '00E05A': 'GALEA NETWORK SECURITY', '48A380': 'Gionee Communication Equipment Co.,Ltd.', '94652D': 'OnePlus Technology (Shenzhen) Co., Ltd', '1CB857': 'Becon Technologies Co,.Ltd.', '682737': 'Samsung Electronics Co.,Ltd', 'F06E32': 'MICROTEL INNOVATION S.R.L.', '54C415': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', '3CF862': 'Intel Corporate', '2816AD': 'Intel Corporate', '0060D3': 'AT&T', '848DC7': 'Cisco SPVTG', '001992': 'Adtran Inc', '045D4B': 'Sony Corporation', '78AF58': 'GIMASI SA', '90505A': 'unGlue, Inc', '8C9351': 'Jigowatts Inc.', 'D838FC': 'Ruckus Wireless', '3478D7': 'Gionee Communication Equipment Co.,Ltd.', '5CCCA0': 'Gridwiz Inc.', '6831FE': 'Teladin Co.,Ltd.', '5800E3': 'Liteon Technology Corporation', '2C0BE9': 'Cisco Systems, Inc', 'C43018': 'MCS Logic Inc.', 'D0FF98': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B0E5ED': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C486E9': 'HUAWEI TECHNOLOGIES CO.,LTD', '000AE4': 'Wistron Corporation', '344B3D': 'Fiberhome Telecommunication Technologies Co.,LTD', 'FC3CE9': 'Tsingtong Technologies Co, Ltd.', 'A408F5': 'Sagemcom Broadband SAS', '00B091': 'Transmeta Corp.', 'ACC662': 'MitraStar Technology Corp.', '886B44': 'Sunnovo International Limited', 'A4580F': 'IEEE Registration Authority', 'C8F733': 'Intel Corporate', 'E0A700': 'Verkada Inc', '58404E': 'Apple, Inc.', 'D0C5F3': 'Apple, Inc.', 'BC9FEF': 'Apple, Inc.', '20AB37': 'Apple, Inc.', '60F445': 'Apple, Inc.', '48F97C': 'Fiberhome Telecommunication Technologies Co.,LTD', '40B93C': 'Hewlett Packard Enterprise', 'C0BFC0': 'HUAWEI TECHNOLOGIES CO.,LTD', '9CD9CB': 'Lesira Manufacturing Pty Ltd', '94E979': 'Liteon Technology Corporation', 'A03D6F': 'Cisco Systems, Inc', 'A0E0AF': 'Cisco Systems, Inc', '187532': 'SICHUAN TIANYI COMHEART TELECOMCO., LTD', '4CB81C': 'SAM Electronics GmbH', '003048': 'Super Micro Computer, Inc.', '44D244': 'Seiko Epson Corporation', 'A08CF8': 'HUAWEI TECHNOLOGIES CO.,LTD', 
'7CF95C': 'U.I. Lapp GmbH', '101331': 'Technicolor', 'A4E6B1': 'Shanghai Joindata Technology Co.,Ltd.', 'C09C04': 'Shaanxi GuoLian Digital TV Technology Co.,Ltd.', 'ACD657': 'Shaanxi GuoLian Digital TV Technology Co.,Ltd.', '007686': 'Cisco Systems, Inc', '8C2FA6': 'Solid Optics B.V.', '8C192D': 'IEEE Registration Authority', '00ACE0': 'ARRIS Group, Inc.', '007532': 'INID BV', '6473E2': 'Arbiter Systems, Inc.', '88C626': 'Logitech, Inc', 'D0608C': 'zte corporation', 'AC233F': 'Shenzhen Minew Technologies Co., Ltd.', '7C03C9': 'Shenzhen YOUHUA Technology Co., Ltd', 'B8E937': 'Sonos, Inc.', 'E45D52': 'Avaya Inc', '0023F7': 'Private', '3C80AA': 'Ransnet Singapore Pte Ltd', 'B49691': 'Intel Corporate', 'C82158': 'Intel Corporate', '7C95B1': 'Aerohive Networks Inc.', '2420C7': 'Sagemcom Broadband SAS', 'D4C8B0': 'Prime Electronics & Satellitics Inc.', '446AB7': 'ARRIS Group, Inc.', '701CE7': 'Intel Corporate', '9C2A70': 'Hon Hai Precision Ind. Co.,Ltd.', '703D15': 'Hangzhou H3C Technologies Co., Limited', 'E49E12': 'FREEBOX SAS', '0481AE': 'Clack Corporation', '9C13AB': 'Chanson Water Co., Ltd.', '98E476': 'Zentan', '14A51A': 'HUAWEI TECHNOLOGIES CO.,LTD', '047503': 'HUAWEI TECHNOLOGIES CO.,LTD', '3CEF8C': 'Zhejiang Dahua Technology Co., Ltd.', 'FC372B': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', 'A4D9A4': 'neXus ID Solutions AB', '484D7E': 'Dell Inc.', 'D4E90B': 'CVT CO.,LTD', 'CCB0DA': 'Liteon Technology Corporation', '7CCC1F': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '18F292': 'Shannon Systems', '8CEA1B': 'Edgecore Networks Corporation', 'E02CF3': 'MRS Electronic GmbH', '50B363': 'Digitron da Amazonia S/A', '2C0E3D': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '58E16C': 'Ying Hua Information Technology (Shanghai)Co., LTD', '9C7DA3': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A4C64F': 'HUAWEI TECHNOLOGIES CO.,LTD', 'CCFD17': 'TCT mobile ltd', 'DC9FDB': 'Ubiquiti Networks Inc.', '002722': 'Ubiquiti Networks Inc.', '00156D': 'Ubiquiti Networks Inc.', '00D78F': 'Cisco Systems, Inc', '2CBABA': 'Samsung Electronics Co.,Ltd', '40D3AE': 'Samsung Electronics Co.,Ltd', '2CDD95': 'Taicang T&W Electronics', '88E87F': 'Apple, Inc.', '9CF48E': 'Apple, Inc.', '5CF7E6': 'Apple, Inc.', 'B853AC': 'Apple, Inc.', '203CAE': 'Apple, Inc.', 'A03BE3': 'Apple, Inc.', '4C3275': 'Apple, Inc.', '487A55': 'ALE International', '001EAE': 'Continental Automotive Systems Inc.', '502B73': 'Tenda Technology Co.,Ltd.Dongguan branch', '04BA36': 'Li Seng Technology Ltd', '107223': 'TELLESCOM INDUSTRIA E COMERCIO EM TELECOMUNICACAO ', 'E0686D': 'Raybased AB', '1861C7': 'lemonbeat GmbH', '9CDC71': 'Hewlett Packard Enterprise', 'B4F81E': 'Kinova', 'D03DC3': 'AQ Corporation', 'EC01E2': 'FOXCONN INTERCONNECT TECHNOLOGY', 'B4E782': 'Vivalnk', '4409B8': 'Salcomp (Shenzhen) CO., LTD.', '3816D1': 'Samsung Electronics Co.,Ltd', 'D0176A': 'Samsung Electronics Co.,Ltd', 'D48890': 'Samsung Electronics Co.,Ltd', '5492BE': 'Samsung Electronics Co.,Ltd', '205D47': 'vivo Mobile Communication Co., Ltd.', '10C60C': 'Domino UK Ltd', '043110': 'Inspur Group Co., Ltd.', '949AA9': 'Microsoft Corporation', 'ACAB2E': 'Beijing LasNubes Technology Co., Ltd.', '600B03': 'Hangzhou H3C Technologies Co., Limited', 'A0AB1B': 'D-Link International', 'D842E2': 'Canary Connect, Inc.', 'C8B21E': 'CHIPSEA TECHNOLOGIES (SHENZHEN) CORP.', '000678': 'D&M Holdings Inc.', 'E0286D': 'AVM Audiovisuelles Marketing und Computersysteme GmbH', '884CCF': 'Pulzze Systems, Inc', '500959': 'Technicolor CH USA Inc.', 'E41218': 'ShenZhen Rapoo Technology Co., Ltd.', '001984': 'ESTIC Corporation', 
'001628': 'Magicard Ltd', '702E22': 'zte corporation', 'C8E776': 'PTCOM Technology', '000278': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '002399': 'Samsung Electronics Co.,Ltd', '0017C9': 'Samsung Electronics Co.,Ltd', '906EBB': 'Hon Hai Precision Ind. Co.,Ltd.', '18F46A': 'Hon Hai Precision Ind. Co.,Ltd.', '4C0F6E': 'Hon Hai Precision Ind. Co.,Ltd.', '78E400': 'Hon Hai Precision Ind. Co.,Ltd.', '00212F': 'Phoebe Micro Inc.', '3859F9': 'Hon Hai Precision Ind. Co.,Ltd.', 'EC55F9': 'Hon Hai Precision Ind. Co.,Ltd.', 'C4731E': 'Samsung Electronics Co.,Ltd', '5C0A5B': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', '7CE9D3': 'Hon Hai Precision Ind. Co.,Ltd.', '1C3E84': 'Hon Hai Precision Ind. Co.,Ltd.', 'B8763F': 'Hon Hai Precision Ind. Co.,Ltd.', '60F494': 'Hon Hai Precision Ind. Co.,Ltd.', '8056F2': 'Hon Hai Precision Ind. Co.,Ltd.', '7CF854': 'Samsung Electronics Co.,Ltd', '001B98': 'Samsung Electronics Co.,Ltd', '001A8A': 'Samsung Electronics Co.,Ltd', '3C5A37': 'Samsung Electronics Co.,Ltd', 'F49F54': 'Samsung Electronics Co.,Ltd', '34C3AC': 'Samsung Electronics Co.,Ltd', '44F459': 'Samsung Electronics Co.,Ltd', '00265D': 'Samsung Electronics Co.,Ltd', 'CCF9E8': 'Samsung Electronics Co.,Ltd', 'D857EF': 'Samsung Electronics Co.,Ltd', '18E2C2': 'Samsung Electronics Co.,Ltd', '9852B1': 'Samsung Electronics Co.,Ltd', 'E440E2': 'Samsung Electronics Co.,Ltd', '103B59': 'Samsung Electronics Co.,Ltd', 'D890E8': 'Samsung Electronics Co.,Ltd', 'C462EA': 'Samsung Electronics Co.,Ltd', '14F42A': 'Samsung Electronics Co.,Ltd', '0808C2': 'Samsung Electronics Co.,Ltd', 'CCFE3C': 'Samsung Electronics Co.,Ltd', '28BAB5': 'Samsung Electronics Co.,Ltd', '182666': 'Samsung Electronics Co.,Ltd', '30D6C9': 'Samsung Electronics Co.,Ltd', 'CC07AB': 'Samsung Electronics Co.,Ltd', '002567': 'Samsung Electronics Co.,Ltd', 'BCB1F3': 'Samsung Electronics Co.,Ltd', '1C62B8': 'Samsung Electronics Co.,Ltd', 'B43A28': 'Samsung Electronics Co.,Ltd', '78A873': 'Samsung Electronics Co.,Ltd', '001C43': 'Samsung Electronics Co.,Ltd', '0023D6': 'Samsung Electronics Co.,Ltd', 'E4121D': 'Samsung Electronics Co.,Ltd', '44D6E1': 'Snuza International Pty. Ltd.', 'FC9114': 'Technicolor CH USA Inc.', '486DBB': 'Vestel Elektronik San ve Tic. 
A.Ş.', '002A10': 'Cisco Systems, Inc', '00A289': 'Cisco Systems, Inc', '44E9DD': 'Sagemcom Broadband SAS', '000F5E': 'Veo', '001328': 'Westech Korea Inc.,', 'B8BF83': 'Intel Corporate', '8C6102': 'Beijing Baofengmojing Technologies Co., Ltd', '548CA0': 'Liteon Technology Corporation', '7C79E8': 'PayRange Inc.', 'A43111': 'ZIV', '008073': 'DWB ASSOCIATES', '80A1D7': 'Shanghai DareGlobal Technologies Co.,Ltd', 'EC1F72': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '8C0D76': 'HUAWEI TECHNOLOGIES CO.,LTD', '84BE52': 'HUAWEI TECHNOLOGIES CO.,LTD', '849FB5': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A4CAA0': 'HUAWEI TECHNOLOGIES CO.,LTD', '84E0F4': 'IEEE Registration Authority', 'D83062': 'Apple, Inc.', 'D0E54D': 'ARRIS Group, Inc.', 'A0C562': 'ARRIS Group, Inc.', '8496D8': 'ARRIS Group, Inc.', '0026D9': 'ARRIS Group, Inc.', 'E8508B': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', 'F8042E': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '00D037': 'ARRIS Group, Inc.', '84E058': 'ARRIS Group, Inc.', '707630': 'ARRIS Group, Inc.', '04BBF9': 'Pavilion Data Systems Inc', 'E4BEED': 'Netcore Technology Inc.', '58FB84': 'Intel Corporate', '00A00E': 'NetAlly', '00C017': 'NetAlly', '5CB066': 'ARRIS Group, Inc.', 'BC8AA3': 'NHN Entertainment', 'A8BD27': 'Hewlett Packard Enterprise', '345760': 'MitraStar Technology Corp.', 'C0D391': 'IEEE Registration Authority', 'D49B5C': 'Chongqing Miedu Technology Co., Ltd.', 'E8EB11': 'Texas Instruments', '44BFE3': 'Shenzhen Longtech Electronics Co.,Ltd', '3C6FEA': 'Panasonic India Pvt. Ltd.', '002261': 'Frontier Silicon Ltd', '001988': 'Wi2Wi, Inc', '18DC56': 'Yulong Computer Telecommunication Scientific (Shenzhen) Co.,Ltd', '0016F2': 'Dmobile System Co., Ltd.', '34074F': 'AccelStor, Inc.', 'B4A984': 'Symantec Corporation', 'B0B28F': 'Sagemcom Broadband SAS', '441441': 'AudioControl Inc.', 'C88D83': 'HUAWEI TECHNOLOGIES CO.,LTD', '00E011': 'UNIDEN CORPORATION', '002555': 'Visonic Technologies 1993 Ltd.', '58986F': 'Revolution Display', 'C81FBE': 'HUAWEI TECHNOLOGIES CO.,LTD', '203DB2': 'HUAWEI TECHNOLOGIES CO.,LTD', '48D539': 'HUAWEI TECHNOLOGIES CO.,LTD', '001F9A': 'Nortel Networks', '000A0E': 'Invivo Research Inc.', '001660': 'Nortel Networks', '001E7E': 'Nortel Networks', '001365': 'Nortel Networks', '000438': 'Nortel Networks', '000EC0': 'Nortel Networks', 'D84FB8': 'LG ELECTRONICS', '000AEB': 'TP-LINK TECHNOLOGIES CO.,LTD.', '2C3731': 'SHENZHEN YIFANG DIGITAL TECHNOLOGY CO.,LTD.', '60EE5C': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '6488FF': 'Sichuan Changhong Electric Ltd.', '002162': 'Nortel Networks', '02E6D3': 'NIXDORF COMPUTER CORP.', '0016B9': 'ProCurve Networking by HP', 'C4084A': 'Nokia', '000801': 'HighSpeed Surfing Inc.', '000772': 'Alcatel-Lucent Shanghai Bell Co., Ltd', 'E03005': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '3C404F': 'GUANGDONG PISEN ELECTRONICS CO.,LTD', '0CA402': 'Alcatel-Lucent IPD', 'A0F3E4': 'Alcatel-Lucent IPD', '84DBFC': 'Nokia', '7CFC3C': 'Visteon Corporation', '981E0F': 'Jeelan (Shanghai Jeelan Technology Information Inc', '4888CA': 'Motorola (Wuhan) Mobility Technologies Communication Co., Ltd.', '385610': 'CANDY HOUSE, Inc.', '00A742': 'Cisco Systems, Inc', '00AA70': 'LG Electronics (Mobile Communications)', 'F895C7': 'LG Electronics (Mobile Communications)', '84D931': 'Hangzhou H3C Technologies Co., Limited', '00116E': 'Peplink International Ltd.', '540955': 'zte corporation', '001E75': 'LG Electronics (Mobile Communications)', '001C62': 'LG Electronics (Mobile Communications)', '505527': 'LG Electronics (Mobile Communications)', '88C9D0': 'LG Electronics (Mobile 
Communications)', 'C041F6': 'LG ELECTRONICS INC', '8C3AE3': 'LG Electronics (Mobile Communications)', '90A46A': 'SISNET CO., LTD', '14E7C8': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', 'ECCD6D': 'Allied Telesis, Inc.', '18339D': 'Cisco Systems, Inc', '146102': 'Alpine Electronics, Inc.', '54276C': 'Jiangsu Houge Technology Corp.', '9CA3A9': 'Guangzhou Juan Optical and Electronical Tech Joint Stock Co., Ltd', '7CC709': 'SHENZHEN RF-LINK TECHNOLOGY CO.,LTD.', 'A03E6B': 'IEEE Registration Authority', '9802D8': 'IEEE Registration Authority', '64FB81': 'IEEE Registration Authority', '0821EF': 'Samsung Electronics Co.,Ltd', '34145F': 'Samsung Electronics Co.,Ltd', '2C265F': 'IEEE Registration Authority', 'D0052A': 'Arcadyan Corporation', 'EC6881': 'Palo Alto Networks', 'E4509A': 'HW Communications Ltd', '702900': 'Shenzhen ChipTrip Technology Co,Ltd', 'ECAAA0': 'PEGATRON CORPORATION', '00E0DD': 'Zenith Electronics Corporation', '50CE75': 'Measy Electronics Co., Ltd.', '00045B': 'Techsan Electronics Co., Ltd.', '0007BA': 'UTStarcom Inc', '90A210': 'United Telecoms Ltd', '6C0B84': 'Universal Global Scientific Industrial Co., Ltd.', '001597': 'AETA AUDIO SYSTEMS', '002397': 'Westell Technologies Inc.', '60E3AC': 'LG Electronics (Mobile Communications)', '90F052': 'MEIZU Technology Co., Ltd.', '001639': 'Ubiquam Co., Ltd.', '000C29': 'VMware, Inc.', '000569': 'VMware, Inc.', '000B0E': 'Trapeze Networks', '8CFDF0': 'Qualcomm Inc.', 'C4BB4C': 'Zebra Information Tech Co. Ltd', '98CF53': 'BBK EDUCATIONAL ELECTRONICS CORP.,LTD.', 'D4A148': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D065CA': 'HUAWEI TECHNOLOGIES CO.,LTD', '486B2C': 'BBK EDUCATIONAL ELECTRONICS CORP.,LTD.', '6C25B9': 'BBK EDUCATIONAL ELECTRONICS CORP.,LTD.', '2C282D': 'BBK EDUCATIONAL ELECTRONICS CORP.,LTD.', '4813F3': 'BBK EDUCATIONAL ELECTRONICS CORP.,LTD.', '00409F': 'Telco Systems, Inc. ', '00001F': 'Telco Systems, Inc. ', '00A012': 'Telco Systems, Inc. 
', '8CEBC6': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B08900': 'HUAWEI TECHNOLOGIES CO.,LTD', '78CB68': 'DAEHAP HYPER-TECH', '34ED0B': ' Shanghai XZ-COM.CO.,Ltd.', 'F0DEF1': 'Wistron Infocomm (Zhongshan) Corporation', 'F80F41': 'Wistron Infocomm (Zhongshan) Corporation', '3C970E': 'Wistron InfoComm(Kunshan)Co.,Ltd.', '30144A': 'Wistron Neweb Corporation', '4C0BBE': 'Microsoft', '0C2576': 'LONGCHEER TELECOMMUNICATION LIMITED', 'D8D43C': 'Sony Corporation', 'D44165': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', 'E4029B': 'Intel Corporate', 'DC1AC5': 'vivo Mobile Communication Co., Ltd.', 'F45EAB': 'Texas Instruments', 'A8FCB7': 'Consolidated Resource Imaging', '00C000': 'LANOPTICS, LTD.', '845181': 'Samsung Electronics Co.,Ltd', 'B0C287': 'Technicolor CH USA Inc.', 'CC3540': 'Technicolor CH USA Inc.', '8C04FF': 'Technicolor CH USA Inc.', 'FC94E3': 'Technicolor CH USA Inc.', 'B88D12': 'Apple, Inc.', '90EF68': 'Zyxel Communications Corporation', 'C816BD': 'Qingdao Hisense Communications Co.,Ltd.', '00EBD5': 'Cisco Systems, Inc', 'C48F07': 'Shenzhen Yihao Hulian Science and Technology Co., Ltd.', 'DC7834': 'LOGICOM SA', 'CCCC81': 'HUAWEI TECHNOLOGIES CO.,LTD', '6C9522': 'Scalys', 'B456B9': 'Teraspek Technologies Co.,Ltd', '9CDD1F': 'Intelligent Steward Co.,Ltd', '3C6816': 'VXi Corporation', 'E811CA': 'SHANDONG KAER ELECTRIC.CO.,LTD', '70288B': 'Samsung Electronics Co.,Ltd', '348A7B': 'Samsung Electronics Co.,Ltd', 'D0577B': 'Intel Corporate', '78009E': 'Samsung Electronics Co.,Ltd', 'ACC33A': 'Samsung Electronics Co.,Ltd', '54F201': 'Samsung Electronics Co.,Ltd', 'C4A366': 'zte corporation', '6073BC': 'zte corporation', '7C3548': 'Transcend Information', '18B169': 'Sonicwall', '444450': 'OttoQ', '50F5DA': 'Amazon Technologies Inc.', '101212': 'Vivo International Corporation Pty Ltd', 'C85B76': 'LCFC(HeFei) Electronics Technology co., ltd', '78FFCA': 'TECNO MOBILE LIMITED', '046565': 'Testop', 'A8BB50': 'WiZ IoT Company Limited', '08C021': 'HUAWEI TECHNOLOGIES CO.,LTD', '600810': 'HUAWEI TECHNOLOGIES CO.,LTD', '48435A': 'HUAWEI TECHNOLOGIES CO.,LTD', '8C8EF2': 'Apple, Inc.', '90B0ED': 'Apple, Inc.', 'F03EBF': 'GOGORO TAIWAN LIMITED', '3C92DC': 'Octopod Technology Co. 
Ltd.', '1000FD': 'LaonPeople', 'C47C8D': 'IEEE Registration Authority', '745C9F': 'TCT mobile ltd', '8C99E6': 'TCT mobile ltd', '449F7F': 'DataCore Software Corporation', '848319': 'Hangzhou Zero Zero Technology Co., Ltd.', 'A81559': 'Breathometer, Inc.', '70BAEF': 'Hangzhou H3C Technologies Co., Limited', '586AB1': 'Hangzhou H3C Technologies Co., Limited', '009006': 'Hamamatsu Photonics K.K.', '001AF4': 'Handreamnet', '04D3CF': 'Apple, Inc.', '4882F2': 'Appel Elektronik GmbH', '78B84B': 'SICHUAN TIANYI COMHEART TELECOMCO.,LTD', '7CB0C2': 'Intel Corporate', '001793': 'Tigi Corporation', '000358': 'Hanyang Digitech Co.Ltd', 'C4CAD9': 'Hangzhou H3C Technologies Co., Limited', '5866BA': 'Hangzhou H3C Technologies Co., Limited', 'E0C79D': 'Texas Instruments', '4C0B3A': 'TCT mobile ltd', 'E42D02': 'TCT mobile ltd', '0CBD51': 'TCT mobile ltd', '4000E0': 'Derek(Shaoguan)Limited', 'FCBC9C': 'Vimar Spa', '149ECF': 'Dell Inc.', 'E80959': 'Guoguang Electric Co.,Ltd', 'D89403': 'Hewlett Packard Enterprise', 'E00EDA': 'Cisco Systems, Inc', 'D0A4B1': 'Sonifex Ltd.', 'F49EEF': 'Taicang T&W Electronics', 'C4F081': 'HUAWEI TECHNOLOGIES CO.,LTD', '801382': 'HUAWEI TECHNOLOGIES CO.,LTD', '94FE22': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4CC55': 'Juniper Networks', '50DD4F': 'Automation Components, Inc', '341FE4': 'ARRIS Group, Inc.', '0024F4': 'Kaminario, Ltd.', '001A29': 'Johnson Outdoors Marine Electronics d/b/a Minnkota', '0090AE': 'ITALTEL S.p.A/RF-UP-I', '00177D': 'IDT Technology Limited', '00A045': 'PHOENIX CONTACT Electronics GmbH', '002378': 'GN Netcom A/S', '50C971': 'GN Netcom A/S', 'F0407B': 'Fiberhome Telecommunication Technologies Co.,LTD', '94885E': 'Surfilter Network Technology Co., Ltd. ', 'C825E1': 'Lemobile Information Technology (Beijing) Co., Ltd', '945089': 'SimonsVoss Technologies GmbH', '042AE2': 'Cisco Systems, Inc', 'E0B6F5': 'IEEE Registration Authority', '0090FA': 'Emulex Corporation', '00E0D5': 'Emulex Corporation', '001035': 'Elitegroup Computer Systems Co.,Ltd.', '000AE6': 'Elitegroup Computer Systems Co.,Ltd.', '7427EA': 'Elitegroup Computer Systems Co.,Ltd.', '649968': 'Elentec', '00409C': 'TRANSWARE', 'B01BD2': 'Le Shi Zhi Xin Electronic Technology (Tianjin) Limited', '54489C': 'CDOUBLES ELECTRONICS CO. LTD.', 'E4A1E6': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '84002D': 'PEGATRON CORPORATION', '408256': 'Continental Automotive GmbH', '5CC7D7': 'AZROAD TECHNOLOGY COMPANY LIMITED', '986B3D': 'ARRIS Group, Inc.', 'E0071B': 'Hewlett Packard Enterprise', '1CABC0': 'Hitron Technologies. 
Inc', '1C3ADE': 'Samsung Electronics Co.,Ltd', '002360': 'Lookit Technology Co., Ltd', '84FEDC': 'Borqs Beijing Ltd.', '608334': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E47E66': 'HUAWEI TECHNOLOGIES CO.,LTD', '94DBDA': 'HUAWEI TECHNOLOGIES CO.,LTD', '54D9E4': 'BRILLIANTTS CO., LTD', 'F462D0': 'Not for Radio, LLC', '98DED0': 'TP-LINK TECHNOLOGIES CO.,LTD.', '508965': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '005BA1': 'shanghai huayuan chuangxin software CO., LTD.', '58D67A': 'TCPlink', '98072D': 'Texas Instruments', 'F0C77F': 'Texas Instruments', '000AC2': 'Wuhan FiberHome Digital Technology Co.,Ltd.', '10DA43': 'NETGEAR', 'B805AB': 'zte corporation', '789682': 'zte corporation', 'D467E7': 'Fiberhome Telecommunication Technologies Co.,LTD', 'E42F26': 'Fiberhome Telecommunication Technologies Co.,LTD', '04C1B9': 'Fiberhome Telecommunication Technologies Co.,LTD', 'C4BED4': 'Avaya Inc', 'D017C2': 'ASUSTek COMPUTER INC.', '349971': 'Quanta Storage Inc.', '9C52F8': 'HUAWEI TECHNOLOGIES CO.,LTD', 'EC13DB': 'Juniper Networks', '5CF286': 'IEEE Registration Authority', 'E8FD72': 'SHANGHAI LINGUO TECHNOLOGY CO., LTD.', '98BB1E': 'BYD Precision Manufacture Company Ltd.', 'AC5F3E': 'SAMSUNG ELECTRO-MECHANICS(THAILAND)', '546D52': 'TOPVIEW OPTRONICS CORP.', '04C103': 'Clover Network, Inc.', '280C28': 'Unigen DataStorage Corporation', 'A4BF01': 'Intel Corporate', '208B37': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '08BE77': 'Green Electronics', '509EA7': 'Samsung Electronics Co.,Ltd', 'A88195': 'Samsung Electronics Co.,Ltd', '88ADD2': 'Samsung Electronics Co.,Ltd', '00CCFC': 'Cisco Systems, Inc', '0019C5': 'Sony Interactive Entertainment Inc.', '001315': 'Sony Interactive Entertainment Inc.', '1C234F': 'EDMI Europe Ltd', 'A444D1': ' Wingtech Group (HongKong)Limited', '006CFD': 'Sichuan Changhong Electric Ltd.', '545AA6': 'Espressif Inc.', 'FC1A11': 'vivo Mobile Communication Co., Ltd.', '001A57': 'Matrix Design Group, LLC', 'A0C589': 'Intel Corporate', '001E1E': 'Honeywell Life Safety', '002340': 'MiXTelematics', 'B48B19': 'Apple, Inc.', '38FDFE': 'IEEE Registration Authority', '2C09CB': 'COBS AB', 'BCEC5D': 'Apple, Inc.', '28A02B': 'Apple, Inc.', 'A84481': 'Nokia Corporation', '8844F6': 'Nokia Corporation', 'F44D17': 'GOLDCARD HIGH-TECH CO.,LTD.', '38B8EB': 'IEEE Registration Authority', '9897D1': 'MitraStar Technology Corp.', 'B83241': 'Wuhan Tianyu Information Industry Co., Ltd.', '0060DC': 'NEC Magnus Communications,Ltd.', '907282': 'Sagemcom Broadband SAS', '001C35': 'Nokia Danmark A/S', '001C9A': 'Nokia Danmark A/S', '001CD6': 'Nokia Danmark A/S', '001CD4': 'Nokia Danmark A/S', '001D98': 'Nokia Danmark A/S', '001DE9': 'Nokia Danmark A/S', '001E3A': 'Nokia Danmark A/S', '002548': 'Nokia Danmark A/S', '0022FC': 'Nokia Danmark A/S', '0022FD': 'Nokia Danmark A/S', '0021AA': 'Nokia Danmark A/S', '001D6E': 'Nokia Danmark A/S', '001CFC': 'Sumitomo Electric Industries,Ltd', '0023B4': 'Nokia Danmark A/S', '001370': 'Nokia Danmark A/S', '9C1874': 'Nokia Danmark A/S', '001BAF': 'Nokia Danmark A/S', 'C8D10B': 'Nokia Corporation', '5CC6D0': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '001A9A': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', 'CC6DA0': 'Roku, Inc.', '0016E4': 'VANGUARD SECURITY ENGINEERING CORP.', '3C8970': 'Neosfar', '6C38A1': 'Ubee Interactive Co., Limited', '001742': 'FUJITSU LIMITED', '001999': 'Fujitsu Technology Solutions GmbH', '005A39': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', 'A089E4': 'Skyworth Digital Technology(Shenzhen) Co.,Ltd', '78CA83': 'IEEE Registration 
Authority', '0C1167': 'Cisco Systems, Inc', '74EAE8': 'ARRIS Group, Inc.', 'F88E85': 'Comtrend Corporation', '02CF1C': 'Communication Machinery Corporation', '0090F5': 'CLEVO CO.', '0030FF': 'DataFab Systems Inc.', '000FF6': 'DARFON LIGHTING CORP', '002100': 'Gemtek Technology Co., Ltd.', '50F520': 'Samsung Electronics Co.,Ltd', '64B310': 'Samsung Electronics Co.,Ltd', 'A4EBD3': 'Samsung Electronics Co.,Ltd', 'C81073': 'CENTURY OPTICOMM CO.,LTD', '343759': 'zte corporation', 'FC2F40': 'Calxeda, Inc.', 'BC620E': 'HUAWEI TECHNOLOGIES CO.,LTD', '74A528': 'HUAWEI TECHNOLOGIES CO.,LTD', '5CF6DC': 'Samsung Electronics Co.,Ltd', '0026E4': 'Canal +', '000117': 'Canal +', 'E01D3B': 'Cambridge Industries(Group) Co.,Ltd.', '70C76F': 'INNO S', '38192F': 'Nokia Corporation', 'B8C68E': 'Samsung Electronics Co.,Ltd', '04FE31': 'Samsung Electronics Co.,Ltd', '4CBCA5': 'Samsung Electronics Co.,Ltd', 'D831CF': 'Samsung Electronics Co.,Ltd', '188331': 'Samsung Electronics Co.,Ltd', '9C65B0': 'Samsung Electronics Co.,Ltd', '8455A5': 'Samsung Electronics Co.,Ltd', 'A87C01': 'Samsung Electronics Co.,Ltd', 'B0D09C': 'Samsung Electronics Co.,Ltd', '50C8E5': 'Samsung Electronics Co.,Ltd', '0020D4': 'Cabletron Systems, Inc.', '00E03A': 'Cabletron Systems, Inc.', '0010E7': 'Breezecom, Ltd.', '9492BC': 'SYNTECH(HK) TECHNOLOGY LIMITED', '001D19': 'Arcadyan Technology Corporation', '0012BF': 'Arcadyan Technology Corporation', '507E5D': 'Arcadyan Technology Corporation', '7C4FB5': 'Arcadyan Technology Corporation', '00223F': 'NETGEAR', '000FB5': 'NETGEAR', '00095B': 'NETGEAR', '001A4F': 'AVM GmbH', '001C4A': 'AVM GmbH', '00150C': 'AVM GmbH', '0026FF': 'BlackBerry RTS', 'A4E4B8': 'BlackBerry RTS', '003067': "BIOSTAR Microtech Int'l Corp.", 'F40B93': 'BlackBerry RTS', '1C69A5': 'BlackBerry RTS', '94EBCD': 'BlackBerry RTS', '28C68E': 'NETGEAR', '04A151': 'NETGEAR', 'F87394': 'NETGEAR', '204E7F': 'NETGEAR', 'C03F0E': 'NETGEAR', '0026F2': 'NETGEAR', '00138F': 'Asiarock Technology Limited', '803773': 'NETGEAR', 'A42B8C': 'NETGEAR', 'CC7D37': 'ARRIS Group, Inc.', 'A47AA4': 'ARRIS Group, Inc.', '001700': 'ARRIS Group, Inc.', '0016B5': 'ARRIS Group, Inc.', '0015A8': 'ARRIS Group, Inc.', '00159A': 'ARRIS Group, Inc.', '001180': 'ARRIS Group, Inc.', '000B06': 'ARRIS Group, Inc.', '00D088': 'ARRIS Group, Inc.', '00128A': 'ARRIS Group, Inc.', '002375': 'ARRIS Group, Inc.', '0023A3': 'ARRIS Group, Inc.', '001ADB': 'ARRIS Group, Inc.', '001F7E': 'ARRIS Group, Inc.', '0011AE': 'ARRIS Group, Inc.', '94CCB9': 'ARRIS Group, Inc.', '3C438E': 'ARRIS Group, Inc.', '0024C1': 'ARRIS Group, Inc.', '0025F2': 'ARRIS Group, Inc.', '0025F1': 'ARRIS Group, Inc.', '001404': 'ARRIS Group, Inc.', '001AAD': 'ARRIS Group, Inc.', '0026BA': 'ARRIS Group, Inc.', '00230B': 'ARRIS Group, Inc.', '74E7C6': 'ARRIS Group, Inc.', '001C12': 'ARRIS Group, Inc.', '5C338E': 'Alpha Networks Inc.', '000941': 'Allied Telesis R&D Center K.K.', '28E347': 'Liteon Technology Corporation', '446D57': 'Liteon Technology Corporation', '9CB70D': 'Liteon Technology Corporation', '68A3C4': 'Liteon Technology Corporation', '70F1A1': 'Liteon Technology Corporation', '8400D2': 'Sony Mobile Communications Inc', '303926': 'Sony Mobile Communications Inc', '00EB2D': 'Sony Mobile Communications Inc', 'B4527D': 'Sony Mobile Communications Inc', '00D9D1': 'Sony Interactive Entertainment Inc.', 'B00594': 'Liteon Technology Corporation', 'EC086B': 'TP-LINK TECHNOLOGIES CO.,LTD.', '94A1A2': 'AMPAK Technology, Inc.', '00014A': 'Sony Corporation', '001EDC': 'Sony Mobile Communications Inc', 
'001D28': 'Sony Mobile Communications Inc', '001813': 'Sony Mobile Communications Inc', '402BA1': 'Sony Mobile Communications Inc', '983B16': 'AMPAK Technology, Inc.', '409558': 'Aisino Corporation', '182861': 'AirTies Wireless Networks', '6C71D9': 'AzureWave Technology Inc.', 'D0E782': 'AzureWave Technology Inc.', '6CADF8': 'AzureWave Technology Inc.', 'A81D16': 'AzureWave Technology Inc.', '34C3D2': 'FN-LINK TECHNOLOGY LIMITED', '54F6C5': 'FUJIAN STAR-NET COMMUNICATION CO.,LTD', 'D0D412': 'ADB Broadband Italia', '0026B8': 'Actiontec Electronics, Inc', '0026FC': 'AcSiP Technology Corp.', '0015AF': 'AzureWave Technology Inc.', '74F06D': 'AzureWave Technology Inc.', '44D832': 'AzureWave Technology Inc.', 'E0B9A5': 'AzureWave Technology Inc.', '781881': 'AzureWave Technology Inc.', 'B046FC': 'MitraStar Technology Corp.', 'E04136': 'MitraStar Technology Corp.', '001CA2': 'ADB Broadband Italia', '002233': 'ADB Broadband Italia', '3039F2': 'ADB Broadband Italia', '0017C2': 'ADB Broadband Italia', '689C5E': 'AcSiP Technology Corp.', '9C0E4A': 'Shenzhen Vastking Electronic Co.,Ltd.', 'A85840': 'Cambridge Industries(Group) Co.,Ltd.', 'A0D37A': 'Intel Corporate', '8896F2': 'Valeo Schalter und Sensoren GmbH', '001073': 'TECHNOBOX, INC.', '20934D': 'FUJIAN STAR-NET COMMUNICATION CO.,LTD', '009027': 'Intel Corporation', 'C48508': 'Intel Corporate', '6805CA': 'Intel Corporate', '247703': 'Intel Corporate', '74E50B': 'Intel Corporate', 'B80305': 'Intel Corporate', '00A0C9': 'Intel Corporation', '141877': 'Dell Inc.', 'E09796': 'HUAWEI TECHNOLOGIES CO.,LTD', '1C4024': 'Dell Inc.', '18FB7B': 'Dell Inc.', 'F8B156': 'Dell Inc.', '141AA3': 'Motorola Mobility LLC, a Lenovo Company', '3407FB': 'Ericsson AB', 'A4A1C2': 'Ericsson AB', '00065B': 'Dell Inc.', '842B2B': 'Dell Inc.', 'F04DA2': 'Dell Inc.', 'E0DB55': 'Dell Inc.', '000F1F': 'Dell Inc.', '24B6FD': 'Dell Inc.', '74E6E2': 'Dell Inc.', '34E6D7': 'Dell Inc.', '001EC9': 'Dell Inc.', '002170': 'Dell Inc.', '00219B': 'Dell Inc.', 'B8AC6F': 'Dell Inc.', '8CA982': 'Intel Corporate', 'BC7737': 'Intel Corporate', '1430C6': 'Motorola Mobility LLC, a Lenovo Company', 'D8FC93': 'Intel Corporate', 'D4AE52': 'Dell Inc.', '28162E': '2Wire Inc', 'F81897': '2Wire Inc', '94C150': '2Wire Inc', '5CF821': 'Texas Instruments', '88074B': 'LG Electronics (Mobile Communications)', '000D72': '2Wire Inc', '001288': '2Wire Inc', '00789E': 'Sagemcom Broadband SAS', 'E8BE81': 'Sagemcom Broadband SAS', '681590': 'Sagemcom Broadband SAS', 'F4EB38': 'Sagemcom Broadband SAS', '001BBF': 'Sagemcom Broadband SAS', '002569': 'Sagemcom Broadband SAS', '141FBA': 'IEEE Registration Authority', '807B85': 'IEEE Registration Authority', 'CC1BE0': 'IEEE Registration Authority', 'F40E11': 'IEEE Registration Authority', '10F681': 'vivo Mobile Communication Co., Ltd.', '00217C': '2Wire Inc', '001FB3': '2Wire Inc', '002275': 'Belkin International Inc.', '0057D2': 'Cisco Systems, Inc', '3C6716': 'Lily Robotics', '00D0BD': 'Lattice Semiconductor Corp. (LPA)', '001F3A': 'Hon Hai Precision Ind. 
Co.,Ltd.', 'C8A030': 'Texas Instruments', '78C5E5': 'Texas Instruments', '0CFD37': 'SUSE Linux GmbH', '2C228B': 'CTR SRL', '0C6F9C': 'Shaw Communications Inc.', '0017E4': 'Texas Instruments', '04E451': 'Texas Instruments', '505663': 'Texas Instruments', '883314': 'Texas Instruments', '647BD4': 'Texas Instruments', 'D8952F': 'Texas Instruments', 'B8FFFE': 'Texas Instruments', '2CE412': 'Sagemcom Broadband SAS', '44C15C': 'Texas Instruments', '0022A5': 'Texas Instruments', 'E043DB': 'Shenzhen ViewAt Technology Co.,Ltd. ', '3CCF5B': 'ICOMM HK LIMITED', '2405F5': 'Integrated Device Technology (Malaysia) Sdn. Bhd.', '3C3556': 'Cognitec Systems GmbH', '3C9066': 'SmartRG, Inc.', '000D88': 'D-Link Corporation', '001195': 'D-Link Corporation', '001346': 'D-Link Corporation', '78E3B5': 'Hewlett Packard', '78ACC0': 'Hewlett Packard', '68B599': 'Hewlett Packard', '1CC1DE': 'Hewlett Packard', 'B8A386': 'D-Link International', '1C7EE5': 'D-Link International', '1CBDB9': 'D-Link International', '00142F': 'Savvius', '28BC18': 'SourcingOverseas Co. Ltd', '94ABDE': 'OMX Technology - FZE', '9CDFB1': 'Shenzhen Crave Communication Co., LTD', 'ACCF85': 'HUAWEI TECHNOLOGIES CO.,LTD', '3871DE': 'Apple, Inc.', '7081EB': 'Apple, Inc.', '00738D': 'Shenzhen TINNO Mobile Technology Corp.', '34BA75': 'Everest Networks, Inc', '7C18CD': 'E-TRON Co.,Ltd.', '00E0FC': 'HUAWEI TECHNOLOGIES CO.,LTD', '6416F0': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F40304': 'Google, Inc.', '546009': 'Google, Inc.', 'A47733': 'Google, Inc.', '807ABF': 'HTC Corporation', '78F882': 'LG Electronics (Mobile Communications)', 'C02C7A': 'Shenzhen Horn Audio Co.,Ltd.', '1CCB99': 'TCT mobile ltd', 'A42BB0': 'TP-LINK TECHNOLOGIES CO.,LTD.', '188B45': 'Cisco Systems, Inc', '606944': 'Apple, Inc.', 'B0C69A': 'Juniper Networks', '2C6BF5': 'Juniper Networks', 'C42F90': 'Hangzhou Hikvision Digital Technology Co.,Ltd.', 'F4CA24': 'FreeBit Co., Ltd.', '00D0B7': 'Intel Corporation', '001DD6': 'ARRIS Group, Inc.', '903EAB': 'ARRIS Group, Inc.', '306023': 'ARRIS Group, Inc.', '14ABF0': 'ARRIS Group, Inc.', '0014F6': 'Juniper Networks', '901ACA': 'ARRIS Group, Inc.', 'C83FB4': 'ARRIS Group, Inc.', 'E0B70A': 'ARRIS Group, Inc.', '001DCE': 'ARRIS Group, Inc.', '0013E8': 'Intel Corporate', '0013CE': 'Intel Corporate', '2C768A': 'Hewlett Packard', 'C8348E': 'Intel Corporate', '4C3488': 'Intel Corporate', '1002B5': 'Intel Corporate', '004026': 'BUFFALO.INC', '4CE676': 'BUFFALO.INC', '000BCD': 'Hewlett Packard', '000F20': 'Hewlett Packard', '00110A': 'Hewlett Packard', 'B8B81E': 'Intel Corporate', 'B46D83': 'Intel Corporate', '000E35': 'Intel Corporation', '0007E9': 'Intel Corporation', '001708': 'Hewlett Packard', '0017A4': 'Hewlett Packard', 'C005C2': 'ARRIS Group, Inc.', '0030C1': 'Hewlett Packard', '0080A0': 'Hewlett Packard', 'D48564': 'Hewlett Packard', '24BE05': 'Hewlett Packard', 'FC3FDB': 'Hewlett Packard', '308D99': 'Hewlett Packard', '5820B1': 'Hewlett Packard', '9457A5': 'Hewlett Packard', '000EB3': 'Hewlett Packard', '080009': 'Hewlett Packard', '90CDB6': 'Hon Hai Precision Ind. Co.,Ltd.', '40490F': 'Hon Hai Precision Ind. Co.,Ltd.', '00265C': 'Hon Hai Precision Ind. Co.,Ltd.', '002269': 'Hon Hai Precision Ind. Co.,Ltd.', 'D87988': 'Hon Hai Precision Ind. 
Co.,Ltd.', '74A78E': 'zte corporation', '00092D': 'HTC Corporation', '443192': 'Hewlett Packard', 'A0D3C1': 'Hewlett Packard', '38EAA7': 'Hewlett Packard', 'AC162D': 'Hewlett Packard', '80C16E': 'Hewlett Packard', 'B4B52F': 'Hewlett Packard', 'D07E28': 'Hewlett Packard', 'D0BF9C': 'Hewlett Packard', '7C6193': 'HTC Corporation', '90E7C4': 'HTC Corporation', 'BCEAFA': 'Hewlett Packard', '7446A0': 'Hewlett Packard', '2C44FD': 'Hewlett Packard', '0453D5': 'Sysorex Global Holdings', 'EC52DC': 'WORLD MEDIA AND TECHNOLOGY Corp.', '94B2CC': 'PIONEER CORPORATION', '0452F3': 'Apple, Inc.', '88C255': 'Texas Instruments', 'CC78AB': 'Texas Instruments', '1C7839': 'Shenzhen Tencent Computer System Co., Ltd.', 'FCD733': 'TP-LINK TECHNOLOGIES CO.,LTD.', '5C899A': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'A81B5A': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '2C5BB8': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '08EB74': 'HUMAX Co., Ltd.', 'E005C5': 'TP-LINK TECHNOLOGIES CO.,LTD.', '388345': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'EC888F': 'TP-LINK TECHNOLOGIES CO.,LTD.', '6466B3': 'TP-LINK TECHNOLOGIES CO.,LTD.', '2832C5': 'HUMAX Co., Ltd.', 'F0F336': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'BC4699': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F483CD': 'TP-LINK TECHNOLOGIES CO.,LTD.', '002127': 'TP-LINK TECHNOLOGIES CO.,LTD.', '5C63BF': 'TP-LINK TECHNOLOGIES CO.,LTD.', '889471': 'Brocade Communications Systems, Inc.', '8C7CFF': 'Brocade Communications Systems, Inc.', '142D27': 'Hon Hai Precision Ind. Co.,Ltd.', '88E3AB': 'HUAWEI TECHNOLOGIES CO.,LTD', 'C40528': 'HUAWEI TECHNOLOGIES CO.,LTD', '3CDFBD': 'HUAWEI TECHNOLOGIES CO.,LTD', '509F27': 'HUAWEI TECHNOLOGIES CO.,LTD', '80717A': 'HUAWEI TECHNOLOGIES CO.,LTD', '0021E8': 'Murata Manufacturing Co., Ltd.', '000E6D': 'Murata Manufacturing Co., Ltd.', '902106': 'BSkyB Ltd', 'D02788': 'Hon Hai Precision Ind. Co.,Ltd.', '904CE5': 'Hon Hai Precision Ind. Co.,Ltd.', '001FE2': 'Hon Hai Precision Ind. Co.,Ltd.', '0016CF': 'Hon Hai Precision Ind. 
Co.,Ltd.', '2002AF': 'Murata Manufacturing Co., Ltd.', '98F537': 'zte corporation', '5C4CA9': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F4C714': 'HUAWEI TECHNOLOGIES CO.,LTD', '286ED4': 'HUAWEI TECHNOLOGIES CO.,LTD', '001E10': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D47856': 'Avaya Inc', 'D842AC': 'Shanghai Feixun Communication Co.,Ltd.', '5439DF': 'HUAWEI TECHNOLOGIES CO.,LTD', '283CE4': 'HUAWEI TECHNOLOGIES CO.,LTD', '2CF4C5': 'Avaya Inc', '3C3A73': 'Avaya Inc', 'FC8399': 'Avaya Inc', '587F66': 'HUAWEI TECHNOLOGIES CO.,LTD', '64A651': 'HUAWEI TECHNOLOGIES CO.,LTD', '086361': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A01290': 'Avaya Inc', '38BB3C': 'Avaya Inc', 'F873A2': 'Avaya Inc', 'CCF954': 'Avaya Inc', '8C34FD': 'HUAWEI TECHNOLOGIES CO.,LTD', 'ACF7F3': 'Xiaomi Communications Co Ltd', 'D4970B': 'Xiaomi Communications Co Ltd', '8CBEBE': 'Xiaomi Communications Co Ltd', '14F65A': 'Xiaomi Communications Co Ltd', '009EC8': 'Xiaomi Communications Co Ltd', '0C1DAF': 'Xiaomi Communications Co Ltd', '0819A6': 'HUAWEI TECHNOLOGIES CO.,LTD', '3CF808': 'HUAWEI TECHNOLOGIES CO.,LTD', '486276': 'HUAWEI TECHNOLOGIES CO.,LTD', 'B41513': 'HUAWEI TECHNOLOGIES CO.,LTD', 'AC4E91': 'HUAWEI TECHNOLOGIES CO.,LTD', '283152': 'HUAWEI TECHNOLOGIES CO.,LTD', '3480B3': 'Xiaomi Communications Co Ltd', 'F48B32': 'Xiaomi Communications Co Ltd', '009021': 'Cisco Systems, Inc', '0090B1': 'Cisco Systems, Inc', '001AB6': 'Texas Instruments', '0012D1': 'Texas Instruments', '001237': 'Texas Instruments', 'A0E6F8': 'Texas Instruments', '70FF76': 'Texas Instruments', 'D03972': 'Texas Instruments', '5C313E': 'Texas Instruments', 'F4B85E': 'Texas Instruments', '68C90B': 'Texas Instruments', '74882A': 'HUAWEI TECHNOLOGIES CO.,LTD', '4CB16C': 'HUAWEI TECHNOLOGIES CO.,LTD', '04BD70': 'HUAWEI TECHNOLOGIES CO.,LTD', 'D4F513': 'Texas Instruments', '507224': 'Texas Instruments', '0090D9': 'Cisco Systems, Inc', '009092': 'Cisco Systems, Inc', '00102F': 'Cisco Systems, Inc', '00100D': 'Cisco Systems, Inc', '001007': 'Cisco Systems, Inc', '001014': 'Cisco Systems, Inc', '0090BF': 'Cisco Systems, Inc', '0050D1': 'Cisco Systems, Inc', '1CE6C7': 'Cisco Systems, Inc', 'CCD539': 'Cisco Systems, Inc', '4C0082': 'Cisco Systems, Inc', '7C95F3': 'Cisco Systems, Inc', '34DBFD': 'Cisco Systems, Inc', '885A92': 'Cisco Systems, Inc', '00400B': 'Cisco Systems, Inc', '006070': 'Cisco Systems, Inc', '500604': 'Cisco Systems, Inc', '00E01E': 'Cisco Systems, Inc', '00112F': 'ASUSTek COMPUTER INC.', '001BFC': 'ASUSTek COMPUTER INC.', 'A0554F': 'Cisco Systems, Inc', '204C9E': 'Cisco Systems, Inc', '84B802': 'Cisco Systems, Inc', 'B0AA77': 'Cisco Systems, Inc', 'BCC493': 'Cisco Systems, Inc', 'A46C2A': 'Cisco Systems, Inc', 'D0A5A6': 'Cisco Systems, Inc', '3C5EC3': 'Cisco Systems, Inc', '64F69D': 'Cisco Systems, Inc', '000389': 'PLANTRONICS, INC.', 'D072DC': 'Cisco Systems, Inc', '28C7CE': 'Cisco Systems, Inc', 'F40F1B': 'Cisco Systems, Inc', 'F8C288': 'Cisco Systems, Inc', '1C6A7A': 'Cisco Systems, Inc', '001EE5': 'Cisco-Linksys, LLC', '484487': 'Cisco SPVTG', '38C85C': 'Cisco SPVTG', '485B39': 'ASUSTek COMPUTER INC.', 'BCAEC5': 'ASUSTek COMPUTER INC.', '10BF48': 'ASUSTek COMPUTER INC.', '5067AE': 'Cisco Systems, Inc', 'F09E63': 'Cisco Systems, Inc', '6C9989': 'Cisco Systems, Inc', '18E728': 'Cisco Systems, Inc', '001217': 'Cisco-Linksys, LLC', '001310': 'Cisco-Linksys, LLC', '046C9D': 'Cisco Systems, Inc', '84B261': 'Cisco Systems, Inc', 'E448C7': 'Cisco SPVTG', '00101F': 'Cisco Systems, Inc', '54A274': 'Cisco Systems, Inc', '60FB42': 'Apple, Inc.', '64B9E8': 'Apple, Inc.', 
'001D4F': 'Apple, Inc.', '002312': 'Apple, Inc.', '80E01D': 'Cisco Systems, Inc', 'D8A25E': 'Apple, Inc.', '000A27': 'Apple, Inc.', '183451': 'Apple, Inc.', '0C771A': 'Apple, Inc.', '286ABA': 'Apple, Inc.', '4CB199': 'Apple, Inc.', 'C09F42': 'Apple, Inc.', 'D023DB': 'Apple, Inc.', '70DEE2': 'Apple, Inc.', 'F0CBA1': 'Apple, Inc.', '182032': 'Apple, Inc.', '403CFC': 'Apple, Inc.', '4860BC': 'Apple, Inc.', '3451C9': 'Apple, Inc.', '406C8F': 'Apple, Inc.', '5855CA': 'Apple, Inc.', 'DC2B61': 'Apple, Inc.', '40A6D9': 'Apple, Inc.', '60FACD': 'Apple, Inc.', '003EE1': 'Apple, Inc.', 'FC253F': 'Apple, Inc.', '04F7E4': 'Apple, Inc.', '34C059': 'Apple, Inc.', 'F0D1A9': 'Apple, Inc.', '705681': 'Apple, Inc.', '14109F': 'Apple, Inc.', '040CCE': 'Apple, Inc.', '54EAA8': 'Apple, Inc.', '28E14C': 'Apple, Inc.', 'E4C63D': 'Apple, Inc.', '54E43A': 'Apple, Inc.', '04DB56': 'Apple, Inc.', 'AC3C0B': 'Apple, Inc.', '701124': 'Apple, Inc.', '042665': 'Apple, Inc.', 'EC3586': 'Apple, Inc.', '78FD94': 'Apple, Inc.', '2CBE08': 'Apple, Inc.', 'E8802E': 'Apple, Inc.', '006171': 'Apple, Inc.', '8C7C92': 'Apple, Inc.', 'B03495': 'Apple, Inc.', 'F437B7': 'Apple, Inc.', 'AC7F3E': 'Apple, Inc.', '280B5C': 'Apple, Inc.', 'ACFDEC': 'Apple, Inc.', 'DC9B9C': 'Apple, Inc.', '54724F': 'Apple, Inc.', 'D8CF9C': 'Apple, Inc.', '7C6DF8': 'Apple, Inc.', '04E536': 'Apple, Inc.', 'A8BBCF': 'Apple, Inc.', '6C4008': 'Apple, Inc.', 'FCA386': 'SHENZHEN CHUANGWEI-RGB ELECTRONICS CO.,LTD', '40331A': 'Apple, Inc.', 'CCC760': 'Apple, Inc.', 'BC4CC4': 'Apple, Inc.', 'DC3714': 'Apple, Inc.', '20A2E4': 'Apple, Inc.', '28F076': 'Apple, Inc.', '141357': 'ATP Electronics, Inc.', 'B8B2EB': 'Googol Technology (HK) Limited', 'FCCF43': 'HUIZHOU CITY HUIYANG DISTRICT MEISIQI INDUSTRY DEVELOPMENT CO,.LTD', 'D848EE': 'Hangzhou Xueji Technology Co., Ltd.', 'B4EF04': 'DAIHAN Scientific Co., Ltd.', 'A4DEC9': 'QLove Mobile Intelligence Information Technology (W.H.) Co. Ltd.', 'CCE0C3': 'EXTEN Technologies, Inc.', '646A74': 'AUTH-SERVERS, LLC', '4C8ECC': 'SILKAN SA', 'E435C8': 'HUAWEI TECHNOLOGIES CO.,LTD', '0C54B9': 'Nokia', '84100D': 'Motorola Mobility LLC, a Lenovo Company', 'D00F6D': 'T&W Electronics Company', '908D78': 'D-Link International', '7C7176': 'Wuxi iData Technology Company Ltd.', '7C0191': 'Apple, Inc.', '2C1BC8': 'Hunan Topview Network System CO.,LTD', 'A8474A': 'Hon Hai Precision Ind. Co.,Ltd.', 'C40049': 'Kamama', '80D605': 'Apple, Inc.', '98E848': 'Axiim', '4040A7': 'Sony Mobile Communications Inc', 'C04A09': 'Zhejiang Everbright Communication Equip. Co,. Ltd', 'A01E0B': 'MINIX Technology Limited', '68E8EB': 'Linktel Technologies Co.,Ltd', 'A845CD': 'Siselectron Technology LTD.', 'D0C193': 'SKYBELL, INC', 'AC6462': 'zte corporation', 'C025A2': 'NEC Platforms, Ltd.', '48137E': 'Samsung Electronics Co.,Ltd', '30F772': 'Hon Hai Precision Ind. 
Co.,Ltd.', 'DC3CF6': 'Atomic Rules LLC', '089B4B': 'iKuai Networks', '4473D6': 'Logitech', '10CC1B': 'Liverock technologies,INC', 'E80734': 'Champion Optical Network Engineering, LLC', 'A43831': 'RF elements s.r.o.', '380546': 'Foctek Photonics, Inc.', 'D48304': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', 'DC2B2A': 'Apple, Inc.', '9C8DD3': 'Leonton Technologies', 'E41A2C': 'ZPE Systems, Inc.', 'E8BDD1': 'HUAWEI TECHNOLOGIES CO.,LTD', 'F41535': 'SPON Communication Technology Co.,Ltd', '380AAB': 'Formlabs', '382DE8': 'Samsung Electronics Co.,Ltd', 'C08997': 'Samsung Electronics Co.,Ltd', 'A815D6': 'Shenzhen Meione Technology CO., LTD', 'C07CD1': 'PEGATRON CORPORATION', '90D8F3': 'zte corporation', 'D445E8': 'Jiangxi Hongpai Technology Co., Ltd.', '342606': 'CarePredict, Inc.', '38B725': 'Wistron Infocomm (Zhongshan) Corporation', 'ACEC80': 'ARRIS Group, Inc.', '507B9D': 'LCFC(HeFei) Electronics Technology co., ltd', '6C7220': 'D-Link International', '30A243': 'Shenzhen Prifox Innovation Technology Co., Ltd.', '380195': 'Samsung Electronics Co.,Ltd', '246E96': 'Dell Inc.', '44975A': 'SHENZHEN FAST TECHNOLOGIES CO.,LTD', '5045F7': 'Liuhe Intelligence Technology Ltd.', 'AC676F': 'Electrocompaniet A.S.', '640DE6': 'Petra Systems', 'E0553D': 'Cisco Meraki', 'FC335F': 'Polyera', '84D4C8': 'Widex A/S', 'EC21E5': 'Toshiba', '04C23E': 'HTC Corporation', 'E01AEA': 'Allied Telesis, Inc.', '28B9D9': 'Radisys Corporation', 'F4C613': 'Alcatel-Lucent Shanghai Bell Co., Ltd', '445F8C': 'Intercel Group Limited', 'B88981': 'Chengdu InnoThings Technology Co., Ltd.', 'F02624': 'WAFA TECHNOLOGIES CO., LTD.', 'F8F464': 'Rawe Electonic GmbH', '5C5188': 'Motorola Mobility LLC, a Lenovo Company', 'EC0133': 'TRINUS SYSTEMS INC.', '2CC548': 'IAdea Corporation', '14DDA9': 'ASUSTek COMPUTER INC.', '184F32': 'Hon Hai Precision Ind. Co.,Ltd.', 'DCA3AC': 'RBcloudtech', '0CE725': 'Microsoft Corporation', '58F102': 'BLU Products Inc.', 'B4AE2B': 'Microsoft', '949F3E': 'Sonos, Inc.', '3089D3': 'HONGKONG UCLOUDLINK NETWORK TECHNOLOGY LIMITED', '5CB395': 'HUAWEI TECHNOLOGIES CO.,LTD', '906CAC': 'Fortinet, Inc.', '3CDA2A': 'zte corporation', '842E27': 'Samsung Electronics Co.,Ltd', '1CADD1': 'Bosung Electronics Co., Ltd.', '082CB0': 'Network Instruments', 'A013CB': 'Fiberhome Telecommunication Technologies Co.,LTD', 'D00492': 'Fiberhome Telecommunication Technologies Co.,LTD', '1432D1': 'Samsung Electronics Co.,Ltd', '1816C9': 'Samsung Electronics Co.,Ltd', '00FC8D': 'Hitron Technologies. 
Inc', '84DF19': 'Chuango Security Technology Corporation', 'DC15DB': 'Ge Ruili Intelligent Technology ( Beijing ) Co., Ltd.', 'E0DB10': 'Samsung Electronics Co.,Ltd', '546172': 'ZODIAC AEROSPACE SAS', 'EC60E0': 'AVI-ON LABS', 'B46D35': 'Dalian Seasky Automation Co;Ltd', '3CA31A': 'Oilfind International LLC', '30F335': 'HUAWEI TECHNOLOGIES CO.,LTD', 'E8F2E3': 'Starcor Beijing Co.,Limited', '6459F8': 'Vodafone Omnitel B.V.', '6C4418': 'Zappware', 'A8D409': 'USA 111 Inc', '6C4598': 'Antex Electronic Corp.', '68A378': 'FREEBOX SAS', '340A22': 'TOP-ACCESS ELECTRONICS CO LTD', 'E866C4': 'Diamanti', 'D4D7A9': 'Shanghai Kaixiang Info Tech LTD', '343D98': 'JinQianMao Technology Co.,Ltd.', 'F44713': 'Leading Public Performance Co., Ltd.', '5CA178': 'TableTop Media (dba Ziosk)', '9CBEE0': 'Biosoundlab Co., Ltd.', '0C413E': 'Microsoft Corporation', 'D06F4A': 'TOPWELL INTERNATIONAL HOLDINGS LIMITED', '0492EE': 'iway AG', '807459': "K's Co.,Ltd.", '601970': 'HUIZHOU QIAOXING ELECTRONICS TECHNOLOGY CO., LTD.', 'A408EA': 'Murata Manufacturing Co., Ltd.', 'B87879': 'Roche Diagnostics GmbH', 'D083D4': 'Xtel Wireless ApS', '08D34B': 'Techman Electronics (Changshu) Co., Ltd.', '78A351': 'SHENZHEN ZHIBOTONG ELECTRONICS CO.,LTD', 'E4695A': 'Dictum Health, Inc.', '7C7A53': 'Phytrex Technology Corp.', '107873': 'Shenzhen Jinkeyi Communication Co., Ltd.', '48EE0C': 'D-Link International', 'EC3C88': 'MCNEX Co.,Ltd.', '70AD54': 'Malvern Instruments Ltd', '9000DB': 'Samsung Electronics Co.,Ltd', 'B4EF39': 'Samsung Electronics Co.,Ltd', 'F02A23': 'Creative Next Design', '584704': ' Shenzhen Webridge Technology Co.,Ltd', '74A063': 'HUAWEI TECHNOLOGIES CO.,LTD', 'ECE2FD': 'SKG Electric Group(Thailand) Co., Ltd.', '148F21': 'Garmin International', '9C685B': 'Octonion SA', '7C534A': 'Metamako', 'BC6E64': 'Sony Mobile Communications Inc', 'BCB308': 'HONGKONG RAGENTEK COMMUNICATION TECHNOLOGY CO.,LIMITED', '6C2E72': 'B&B EXPORTING LIMITED', '5CCCFF': 'Techroutes Network Pvt Ltd', '90C35F': 'Nanjing Jiahao Technology Co., Ltd.', 'C808E9': 'LG Electronics', '183A2D': 'Samsung Electronics Co.,Ltd', 'EC74BA': 'Hirschmann Automation and Control GmbH', 'FC3288': 'CELOT Wireless Co., Ltd', 'D87495': 'zte corporation', '5C3B35': 'Gehirn Inc.', 'E4FED9': 'EDMI Europe Ltd', '5CF7C3': 'SYNTECH (HK) TECHNOLOGY LIMITED', '9CE230': 'JULONG CO,.LTD.', '5C41E7': 'Wiatec International Ltd.', '344CA4': 'amazipoint technology Ltd.', 'A8F038': 'SHEN ZHEN SHI JIN HUA TAI ELECTRONICS CO.,LTD', 'ACC73F': 'VITSMO CO., LTD.', '44356F': 'Neterix', '74E277': 'Vizmonet Pte Ltd', '14893E': 'VIXTEL TECHNOLOGIES LIMTED', 'BC54F9': 'Drogoo Technology Co., Ltd.', '78FC14': 'Family Zone Cyber Safety Ltd ', '3809A4': 'Firefly Integrations', 'BCE767': 'Quanzhou TDX Electronics Co., Ltd', 'FCAFAC': 'Socionext Inc.', 'BC4DFB': 'Hitron Technologies. Inc', '2C337A': 'Hon Hai Precision Ind. 
Co.,Ltd.', '84DDB7': 'Cilag GmbH International', '08EFAB': 'SAYME WIRELESS SENSOR NETWORK', '7076FF': 'KERLINK', '1436C6': 'Lenovo Mobile Communication Technology Ltd.', '68F728': 'LCFC(HeFei) Electronics Technology co., ltd', '382C4A': 'ASUSTek COMPUTER INC.', '307350': 'Inpeco SA', 'DCEC06': 'Heimi Network Technology Co., Ltd.', 'CCBDD3': 'Ultimaker B.V.', '8CE78C': 'DK Networks', '545146': 'AMG Systems Ltd.', '8463D6': 'Microsoft Corporation', 'EC13B2': 'Netonix', '104E07': 'Shanghai Genvision Industries Co.,Ltd', '049B9C': 'Eadingcore Intelligent Technology Co., Ltd.', '842690': 'BEIJING THOUGHT SCIENCE CO.,LTD.', '801967': 'Shanghai Reallytek Information Technology Co.,Ltd', '2CF7F1': 'Seeed Technology Inc.', '3C1E13': 'HANGZHOU SUNRISE TECHNOLOGY CO., LTD', '08115E': 'Bitel Co., Ltd.', '0881BC': 'HongKong Ipro Technology Co., Limited', 'C09879': 'Acer Inc.', 'B84FD5': 'Microsoft Corporation', 'D84A87': 'OI ELECTRIC CO.,LTD', 'F03D29': 'Actility', '88708C': 'Lenovo Mobile Communication Technology Ltd.', '5014B5': 'Richfit Information Technology Co., Ltd', 'CC3F1D': 'Intesis Software SL', 'DCDA4F': 'GETCK TECHNOLOGY, INC', '101218': 'Korins Inc.', '3428F0': 'ATN International Limited', 'CC10A3': 'Beijing Nan Bao Technology Co., Ltd.', '5CAAFD': 'Sonos, Inc.', '14EDE4': 'Kaiam Corporation', 'D01242': 'BIOS Corporation', '603696': 'The Sapling Company', '54FFCF': 'Mopria Alliance', 'F4F26D': 'TP-LINK TECHNOLOGIES CO.,LTD.', '404EEB': 'Higher Way Electronic Co., Ltd.', 'C456FE': 'Lava International Ltd.', 'ACB74F': 'METEL s.r.o.', 'C0EEFB': 'OnePlus Tech (Shenzhen) Ltd', '304225': 'BURG-WÄCHTER KG', 'FCDBB3': 'Murata Manufacturing Co., Ltd.', 'CCF538': '3isysnetworks', 'B8BD79': 'TrendPoint Systems', '74F413': 'Maxwell Forest', 'A00627': 'NEXPA System', '303335': 'Boosty', '44746C': 'Sony Mobile Communications Inc', '4C9EFF': 'Zyxel Communications Corporation', 'BC9CC5': 'Beijing Huafei Technology Co., Ltd.', '5CB8CB': 'Allis Communications', '34F0CA': 'Shenzhen Linghangyuan Digital Technology Co.,Ltd.', '70720D': 'Lenovo Mobile Communication Technology Ltd.', '3CCD5A': 'Technische Alternative GmbH', 'FCAA14': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '94D60E': 'shenzhen yunmao information technologies co., ltd', '748F4D': 'MEN Mikro Elektronik GmbH', '506787': 'Planet Networks', '8CBF9D': 'Shanghai Xinyou Information Technology Ltd. Co.', '9CAD97': 'Hon Hai Precision Ind. Co.,Ltd.', '882950': 'Netmoon Technology Co., Ltd', '7CE524': 'Quirky, Inc.', 'F4D261': 'SEMOCON Co., Ltd', '48D855': 'Telvent', '08F728': 'GLOBO Multimedia Sp. z o.o. Sp.k.', '206E9C': 'Samsung Electronics Co.,Ltd', '6C2F2C': 'Samsung Electronics Co.,Ltd', 'B009D3': 'Avizia', '60C1CB': 'Fujian Great Power PLC Equipment Co.,Ltd', 'BC25F0': '3D Display Technologies Co., Ltd.', 'C03D46': 'Shanghai Sango Network Technology Co.,Ltd', '64EAC5': 'SiboTech Automation Co., Ltd.', '30F7D7': 'Thread Technology Co., Ltd', '18227E': 'Samsung Electronics Co.,Ltd', '30C7AE': 'Samsung Electronics Co.,Ltd', '44D4E0': 'Sony Mobile Communications Inc', 'FCD5D9': 'Shenzhen SDMC Technology Co., Ltd.', '74DA38': 'Edimax Technology Co. 
Ltd.', 'E4F4C6': 'NETGEAR', 'CCA0E5': 'DZG Metering GmbH', '60812B': 'Custom Control Concepts', '1C1CFD': 'Dalian Hi-Think Computer Technology, Corp', '90AE1B': 'TP-LINK TECHNOLOGIES CO.,LTD.', '60E327': 'TP-LINK TECHNOLOGIES CO.,LTD.', '48D18E': 'Metis Communication Co.,Ltd', 'F86601': 'Suzhou Chi-tek information technology Co., Ltd', '145645': 'Savitech Corp.', '7062B8': 'D-Link International', 'ACA919': 'TrekStor GmbH', 'B025AA': 'Private', 'D4B43E': 'Messcomp Datentechnik GmbH', '94C014': 'Sorter Sp. j. Konrad Grzeszczyk Michał Ziomek', '9CFBF1': 'MESOMATIC GmbH & Co.KG', '1027BE': 'TVIP', '2087AC': 'AES motomation', '709383': 'Intelligent Optical Network High Tech CO.,LTD.', '80D433': 'LzLabs GmbH', 'B0DA00': 'CERA ELECTRONIQUE', '1CAB01': 'Innovolt', 'D8B6D6': 'Blu Tether Limited', '6C2C06': 'OOO NPP Systemotechnika-NN', 'C4913A': 'Shenzhen Sanland Electronic Co., ltd.', '68856A': 'OuterLink Corporation', '9451BF': 'Hyundai ESG', 'F015A0': 'KyungDong One Co., Ltd.', 'C44E1F': 'BlueN', 'B0869E': 'Chloride S.r.L', 'D057A1': 'Werma Signaltechnik GmbH & Co. KG', 'B4B542': 'Hubbell Power Systems, Inc.', '54CDEE': 'ShenZhen Apexis Electronic Co.,Ltd', '88E8F8': 'YONG TAI ELECTRONIC (DONGGUAN) LTD.', '909864': 'Impex-Sat GmbH&Co KG', 'DCE578': 'Experimental Factory of Scientific Engineering and Special Design Department', 'D86595': "Toy's Myth Inc.", 'F4B52F': 'Juniper Networks', 'C0F79D': 'Powercode', 'C4C919': 'Energy Imports Ltd', '38F098': 'Vapor Stone Rail Systems', '64E892': 'Morio Denki Co., Ltd.', 'D8DD5F': 'BALMUDA Inc.', 'D86194': 'Objetivos y Sevicios de Valor Añadido', 'E8FC60': 'ELCOM Innovations Private Limited', '589CFC': 'FreeBSD Foundation', '702C1F': 'Wisol', 'C8D429': 'Muehlbauer AG', 'F85C45': 'IC Nexus Co. Ltd.', 'ACE069': 'ISAAC Instruments', '30B5C2': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'E07F53': 'TECHBOARD SRL', '48FEEA': 'HOMA B.V.', 'E8EA6A': 'StarTech.com', '04DB8A': 'Suntech International Ltd.', '90DFB7': 's.m.s smart microwave sensors GmbH', '085700': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'FC27A2': 'TRANS ELECTRIC CO., LTD.', 'FCBBA1': 'Shenzhen Minicreate Technology Co.,Ltd', 'F08A28': 'JIANGSU HENGSION ELECTRONIC S and T CO.,LTD', '28BB59': 'RNET Technologies, Inc.', '242642': 'SHARP Corporation.', '34DE34': 'zte corporation', 'FC1607': 'Taian Technology(Wuxi) Co.,Ltd.', 'AC02CA': 'HI Solutions, Inc.', '746F3D': 'Contec GmbH', '4C0DEE': 'JABIL CIRCUIT (SHANGHAI) LTD.', 'D0634D': 'Meiko Maschinenbau GmbH & Co. KG', '008B43': 'RFTECH', '8C41F2': 'RDA Technologies Ltd.', 'E036E3': 'Stage One International Co., Ltd.', 'DC052F': 'National Products Inc.', 'D0BD01': 'DS International', '8CDE99': 'Comlab Inc.', '4CE1BB': 'Zhuhai HiFocus Technology Co., Ltd.', '24A87D': 'Panasonic Automotive Systems Asia Pacific(Thailand)Co.,Ltd.', 'EC71DB': 'Shenzhen Baichuan Digital Technology Co., Ltd.', 'A409CB': 'Alfred Kaercher GmbH & Co KG', '8C569D': 'Imaging Solutions Group', '40B6B1': 'SUNGSAM CO,.Ltd', '84FE9E': 'RTC Industries, Inc.', 'FC4B1C': 'INTERSENSOR S.R.L.', '403067': 'Conlog (Pty) Ltd', 'E86183': 'Black Diamond Advanced Technology, LLC', '38C9A9': 'SMART High Reliability Solutions, Inc.', '501AC5': 'Microsoft', 'B0989F': 'LG CNS', 'C0F1C4': 'Pacidal Corporation Ltd.', '600347': 'Billion Electric Co. 
Ltd.', 'F0D3A7': 'CobaltRay Co., Ltd', '38A53C': 'COMECER Netherlands', '5CFFFF': 'Shenzhen Kezhonglong Optoelectronic Technology Co., Ltd', '085240': 'EbV Elektronikbau- und Vertriebs GmbH', 'B8C1A2': 'Dragon Path Technologies Co., Limited', '80F25E': 'Kyynel', '68692E': 'Zycoo Co.,Ltd', 'D46867': 'Neoventus Design Group', '2C18AE': 'Trend Electronics Co., Ltd.', 'F81CE5': 'Telefonbau Behnke GmbH', '889166': 'Viewcooper Corp.', '103378': 'FLECTRON Co., LTD', '14EDA5': 'Wächter GmbH Sicherheitssysteme', '50C006': 'Carmanah Signs', '04CB1D': 'Traka plc', 'A4E9A3': 'Honest Technology Co., Ltd', 'A0E5E9': 'enimai Inc', '9C216A': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'F862AA': 'xn systems', '107BEF': 'Zyxel Communications Corporation', 'B462AD': 'Elysia Germany GmbH', '345C40': 'Cargt Holdings LLC', '68193F': 'Digital Airways', 'B4750E': 'Belkin International Inc.', '94103E': 'Belkin International Inc.', '7CB733': 'ASKEY COMPUTER CORP', '3051F8': 'BYK-Gardner GmbH', 'E8F226': 'MILLSON CUSTOM SOLUTIONS INC.', '44EE30': 'Budelmann Elektronik GmbH', 'FC1E16': 'IPEVO corp', '3C6E63': 'Mitron OY', 'A4059E': 'STA Infinity LLP', 'A0BF50': 'S.C. ADD-PRODUCTION S.R.L.', 'E8D4E0': 'Beijing BenyWave Technology Co., Ltd.', 'FC019E': 'VIEVU', '642184': 'Nippon Denki Kagaku Co.,LTD', '2464EF': 'CYG SUNRI CO.,LTD.', 'D8270C': 'MaxTronic International Co., Ltd.', 'B4CCE9': 'PROSYST', 'C4D655': 'Tercel technology co.,ltd', '58BDF9': 'Sigrand', 'C0C687': 'Cisco SPVTG', 'C49380': 'Speedytel technology', '007DFA': 'Volkswagen Group of America', '2C7155': 'HiveMotion', '20180E': 'Shenzhen Sunchip Technology Co., Ltd', '80B219': 'ELEKTRON TECHNOLOGY UK LIMITED', 'E0AEB2': 'Bender GmbH & Co.KG', 'F84A7F': 'Innometriks Inc', '74A4B5': 'Powerleader Science and Technology Co. Ltd.', '909F43': 'Accutron Instruments Inc.', '2894AF': 'Samhwa Telecom', 'AC5036': 'Pi-Coral Inc', '0C9B13': 'Shanghai Magic Mobile Telecommunication Co.Ltd.', '94BF1E': 'eflow Inc. / Smart Device Planning and Development Division', 'E8516E': 'TSMART Inc.', 'AC220B': 'ASUSTek COMPUTER INC.', 'B887A8': 'Step Ahead Innovations Inc.', 'E0A198': 'NOJA Power Switchgear Pty Ltd', '88354C': 'Transics', '3C6104': 'Juniper Networks', 'CC4AE1': 'fourtec -Fourier Technologies', '8C4B59': '3D Imaging & Simulations Corp', '5C3327': 'Spazio Italia srl', '507691': 'Tekpea, Inc.', '28F532': 'ADD-Engineering BV', '9440A2': 'Anywave Communication Technologies, Inc.', '1C86AD': 'MCT CO., LTD.', '28D93E': 'Telecor Inc.', '640B4A': 'Digital Telecom Technology Limited', '384233': 'Wildeboer Bauteile GmbH', '3C8AB0': 'Juniper Networks', 'C034B4': 'Gigastone Corporation', 'CCE8AC': 'SOYEA Technology Co.,Ltd.', '70F176': 'Data Modul AG', 'B847C6': 'SanJet Technology Corp.', 'B8CD93': 'Penetek, Inc', '28DB81': 'Shanghai Guao Electronic Technology Co., Ltd', '3806B4': 'A.D.C. 
GmbH', 'B8241A': 'SWEDA INFORMATICA LTDA', 'A0B100': 'ShenZhen Cando Electronics Co.,Ltd', '201D03': 'Elatec GmbH', 'E067B3': 'Shenzhen C-Data Technology Co., Ltd', '1C4AF7': 'AMON INC', 'E4776B': 'AARTESYS AG', '30F31D': 'zte corporation', 'A0EC80': 'zte corporation', '0CC81F': 'Summer Infant, Inc.', 'A8FB70': 'WiseSec L.t.d', 'E481B3': 'Shenzhen ACT Industrial Co.,Ltd.', 'C06C6D': 'MagneMotion, Inc.', 'E880D8': 'GNTEK Electronics Co.,Ltd.', '303EAD': 'Sonavox Canada Inc', 'F835DD': 'Gemtek Technology Co., Ltd.', 'D8B04C': 'Jinan USR IOT Technology Co., Ltd.', 'CC04B4': 'Select Comfort', '5C15E1': 'AIDC TECHNOLOGY (S) PTE LTD', 'E8519D': 'Yeonhab Precision Co.,LTD', 'DC5726': 'Power-One', 'F8DADF': 'EcoTech, Inc.', '30AE7B': 'Deqing Dusun Electron CO., LTD', '68EC62': 'YODO Technology Corp. Ltd.', '188857': 'Beijing Jinhong Xi-Dian Information Technology Corp.', 'B8C855': 'Shanghai GBCOM Communication Technology Co.,Ltd.', '78303B': 'Stephen Technologies Co.,Limited', 'CC1AFA': 'zte corporation', '6C8B2F': 'zte corporation', '8C5AF0': 'Exeltech Solar Products', 'B0808C': 'Laser Light Engines', 'D8DCE9': 'Kunshan Erlab ductless filtration system Co.,Ltd', 'DCD52A': 'Sunny Heart Limited', 'D866C6': 'Shenzhen Daystar Technology Co.,ltd', 'D00EA4': 'Porsche Cars North America', '784B08': 'f.robotics acquisitions ltd', '84E4D9': 'Shenzhen NEED technology Ltd.', '40BC73': 'Cronoplast S.L.', '4007C0': 'Railtec Systems GmbH', 'A4D3B5': 'GLITEL Stropkov, s.r.o.', 'D43A65': 'IGRS Engineering Lab Ltd.', 'F499AC': 'WEBER Schraubautomaten GmbH', 'D05099': 'ASRock Incorporation', 'A49EDB': 'AutoCrib, Inc.', '1C76CA': 'Terasic Technologies Inc.', '888964': 'GSI Electronics Inc.', '7CD844': 'Enmotus Inc', '44619C': 'FONsystem co. ltd.', '70820E': 'as electronics GmbH', '0C1105': 'AKUVOX (XIAMEN) NETWORKS CO., LTD', '5C22C4': 'DAE EUN ELETRONICS CO., LTD', 'F8FEA8': 'Technico Japan Corporation', '1800DB': 'Fitbit Inc.', '78A106': 'TP-LINK TECHNOLOGIES CO.,LTD.', '6C6126': 'Rinicom Holdings', '88685C': 'Shenzhen ChuangDao & Perpetual Eternal Technology Co.,Ltd', '102831': 'Morion Inc.', 'EC2C49': 'University of Tokyo', 'D82916': 'Ascent Communication Technology', '2CF203': 'EMKO ELEKTRONIK SAN VE TIC AS', 'B4FE8C': 'Centro Sicurezza Italia SpA', '40E730': 'DEY Storage Systems, Inc.', '68B094': 'INESA ELECTRON CO.,LTD', 'A073FC': 'Rancore Technologies Private Limited', '44F849': 'Union Pacific Railroad', 'CC0DEC': 'Cisco SPVTG', '1C37BF': 'Cloudium Systems Ltd.', '50ABBF': 'Hoseo Telecom', '0C722C': 'TP-LINK TECHNOLOGIES CO.,LTD.', '9CE635': 'Nintendo Co., Ltd.', '60A44C': 'ASUSTek COMPUTER INC.', '185AE8': 'Zenotech.Co.,Ltd', 'C47DCC': 'Zebra Technologies Inc', 'E0AEED': 'LOENK', 'E492E7': 'Gridlink Tech. 
Co.,Ltd.', 'CC047C': 'G-WAY Microwave', '64535D': 'Frauscher Sensortechnik', '3C6FF7': 'EnTek Systems, Inc.', '2C7B5A': 'Milper Ltd', 'D4BF2D': 'SE Controls Asia Pacific Ltd', 'E0D9A2': 'Hippih aps', 'FC0647': 'Cortland Research, LLC', '6CD146': 'FRAMOS GmbH', '54E032': 'Juniper Networks', '7076DD': 'Oxyguard International A/S', '5461EA': 'Zaplox AB', 'D08B7E': 'Passif Semiconductor', '04586F': 'Sichuan Whayer information industry Co.,LTD', 'FC9FAE': 'Fidus Systems Inc', '681E8B': 'InfoSight Corporation', 'D052A8': 'Physical Graph Corporation', 'CC3A61': 'SAMSUNG ELECTRO MECHANICS CO., LTD.', 'F8D7BF': 'REV Ritter GmbH', '48BE2D': 'Symanitron', 'F02329': 'SHOWA DENKI CO.,LTD.', 'F073AE': 'PEAK-System Technik', '48B8DE': 'HOMEWINS TECHNOLOGY CO.,LTD.', '10EA59': 'Cisco SPVTG', '0C191F': 'Inform Electronik', '1065CF': 'IQSIM', '684CA8': 'Shenzhen Herotel Tech. Co., Ltd.', '98208E': 'Definium Technologies', '704AE4': 'Rinstrum Pty Ltd', '083AB8': 'Shinoda Plasma Co., Ltd.', 'A0DD97': 'PolarLink Technologies, Ltd', 'EC89F5': 'Lenovo Mobile Communication Technology Ltd.', 'B49842': 'zte corporation', '7054D2': 'PEGATRON CORPORATION', '645A04': 'Chicony Electronics Co., Ltd.', 'AC1702': 'Fibar Group sp. z o.o.', '984CD3': 'Mantis Deposition', '08606E': 'ASUSTek COMPUTER INC.', '3C57D5': 'FiveCo', 'F84897': 'Hitachi, Ltd.', 'F80BD0': 'Datang Telecom communication terminal (Tianjin) Co., Ltd.', 'E89AFF': 'Fujian LANDI Commercial Equipment Co.,Ltd', '0C8CDC': 'Suunto Oy', '60C5A8': 'Beijing LT Honway Technology Co.,Ltd', '28D244': 'LCFC(HeFei) Electronics Technology Co., Ltd.', 'B4DF3B': 'Chromlech', '7C9A9B': 'VSE valencia smart energy', '84E714': 'Liang Herng Enterprise,Co.Ltd.', 'B829F7': 'Blaster Tech', 'E4A7FD': 'Cellco Partnership', '2CE2A8': 'DeviceDesign', '00B56D': 'David Electronics Co., LTD.', '2C3557': 'ELLIY Power CO..Ltd', 'B80415': 'Bayan Audio', 'D4136F': 'Asia Pacific Brands', 'C8E1A7': 'Vertu Corporation Limited', '4CAB33': 'KST technology', 'F4472A': 'Nanjing Rousing Sci. and Tech. Industrial Co., Ltd', 'DC028E': 'zte corporation', 'A845E9': 'Firich Enterprises CO., LTD.', '485261': 'SOREEL', '646223': 'Cellient Co., Ltd.', '98473C': 'SHANGHAI SUNMON COMMUNICATION TECHNOGY CO.,LTD', '5481AD': 'Eagle Research Corporation', '54A04F': 't-mac Technologies Ltd', '14DB85': 'S NET MEDIA', 'B8DAF1': 'Strahlenschutz- Entwicklungs- und Ausruestungsgesellschaft mbH', 'D45C70': 'Wi-Fi Alliance', 'EC473C': 'Redwire, LLC', '3CC12C': 'AES Corporation', '949BFD': 'Trans New Technology, Inc.', 'A00ABF': 'Wieson Technologies Co., Ltd.', '8CCDE8': 'Nintendo Co., Ltd.', '7CB232': 'Hui Zhou Gaoshengda Technology Co.,LTD', '00E666': 'ARIMA Communications Corp.', '34BDFA': 'Cisco SPVTG', 'F41E26': 'Simon-Kaloi Engineering', '702526': 'Nokia', '18D949': 'Qvis Labs, LLC', 'D808F5': 'Arcadia Networks Co. Ltd. ', '0CC47E': 'EUCAST Co., Ltd.', '50724D': 'BEG Brueck Electronic GmbH', '783CE3': 'Kai-EE', 'B898B0': 'Atlona Inc.', '24694A': 'Jasmine Systems Inc.', '080C0B': 'SysMik GmbH Dresden', 'DCBF90': 'HUIZHOU QIAOXING TELECOMMUNICATION INDUSTRY CO.,LTD.', '049F06': 'Smobile Co., Ltd.', '289A4B': 'SteelSeries ApS', 'A08C15': 'Gerhard D. Wempe KG', '90CF6F': 'Dlogixs Co Ltd', 'C8FB26': 'Cisco SPVTG', 'ACBD0B': 'IMAC CO.,LTD', 'B85810': 'NUMERA, INC.', 'ECD950': 'IRT SA', '7C02BC': 'Hansung Electronics Co. 
LTD', 'B82410': 'Magneti Marelli Slovakia s.r.o.', '105F49': 'Cisco SPVTG', '1C5C60': 'Shenzhen Belzon Technology Co.,LTD.', 'B8B94E': 'Shenzhen iBaby Labs, Inc.', 'ACC698': 'Kohzu Precision Co., Ltd.', '7C386C': 'Real Time Logic', '2067B1': 'Pluto inc.', '189A67': 'CSE-Servelec Limited', '087D21': 'Altasec technology corporation', 'F8051C': 'DRS Imaging and Targeting Solutions', '78D34F': 'Pace-O-Matic, Inc.', 'A4466B': 'EOC Technology', '901EDD': 'GREAT COMPUTER CORPORATION', '34D7B4': 'Tributary Systems, Inc.', 'F40F9B': 'WAVELINK', '645FFF': 'Nicolet Neuro', 'AC7A42': 'iConnectivity', '700BC0': 'Dewav Technology Company', 'CC14A6': 'Yichun MyEnergy Domain, Inc', '109FA9': 'Actiontec Electronics, Inc', '647C34': 'Ubee Interactive Co., Limited', 'C0A364': '3D Systems Massachusetts', '1C5FFF': 'Beijing Ereneben Information Technology Co.,Ltd Shenzhen Branch', '6045BD': 'Microsoft', '241064': 'Shenzhen Ecsino Tecnical Co. Ltd', '7CEBEA': 'ASCT', '9C0DAC': 'Tymphany HK Limited', '70B599': 'Embedded Technologies s.r.o.', 'EC4C4D': 'ZAO NPK RoTeK', 'A4D18F': 'Shenzhen Skyee Optical Fiber Communication Technology Ltd. ', '58343B': 'Glovast Technology Ltd.', '889676': 'TTC MARCONI s.r.o.', '5C1737': 'I-View Now, LLC.', 'AC0A61': 'Labor S.r.L.', '1C43EC': 'JAPAN CIRCUIT CO.,LTD', '54D1B0': 'Universal Laser Systems, Inc', '785262': 'Shenzhen Hojy Software Co., Ltd.', '746A89': 'Rezolt Corporation', '702F4B': 'PolyVision Inc.', '741489': 'SRT Wireless', '241B13': 'Shanghai Nutshell Electronic Co., Ltd.', '20014F': 'Linea Research Ltd', 'EC0ED6': 'ITECH INSTRUMENTS SAS', '240917': 'Devlin Electronics Limited', '9C54CA': 'Zhengzhou VCOM Science and Technology Co.,Ltd', 'B43564': 'Fujian Tian Cheng Electron Science & Technical Development Co.,Ltd.', '00BF15': 'Genetec Inc.', '38EE9D': 'Anedo Ltd.', '78BEBD': 'STULZ GmbH', 'D4DF57': 'Alpinion Medical Systems', '5048EB': 'BEIJING HAIHEJINSHENG NETWORK TECHNOLOGY CO. LTD.', 'B40E96': 'HERAN ', '508C77': 'DIRMEIER Schanktechnik GmbH &Co KG', '40704A': 'Power Idea Technology Limited', '545EBD': 'NL Technologies', 'F47F35': 'Cisco Systems, Inc', 'BCC168': 'DinBox Sverige AB', 'DC309C': 'Heyrex Limited', '2C00F7': 'XOS', '28F606': 'Syes srl', '0CAF5A': 'GENUS POWER INFRASTRUCTURES LIMITED', '7CEF8A': 'Inhon International Ltd.', '241125': 'Hutek Co., Ltd.', 'B431B8': 'Aviwest', 'CC187B': 'Manzanita Systems, Inc.', '08BE09': 'Astrol Electronic AG', 'B41DEF': 'Internet Laboratories, Inc.', '809393': 'Xapt GmbH', 'A007B6': 'Advanced Technical Support, Inc.', '0CD2B5': 'Binatone Telecommunication Pvt. Ltd', '4846F1': 'Uros Oy', 'B827EB': 'Raspberry Pi Foundation', '84AF1F': 'Beat System Service Co,. Ltd.', 'B058C4': 'Broadcast Microwave Services, Inc', '745798': 'TRUMPF Laser GmbH + Co. KG', '2817CE': 'Omnisense Ltd', '3CCE73': 'Cisco Systems, Inc', 'B0BD6D': 'Echostreams Innovative Solutions', '6044F5': 'Easy Digital Ltd.', '90AC3F': 'BrightSign LLC', 'AC51EE': 'Cambridge Communication Systems Ltd', '78A183': 'Advidia', 'A8D0E5': 'Juniper Networks', 'F89955': 'Fortress Technology Inc', '4CC94F': 'Nokia', '0CE5D3': 'DH electronics GmbH', 'E425E9': 'Color-Chip', '14B1C8': 'InfiniWing, Inc.', 'E840F2': 'PEGATRON CORPORATION', '50053D': 'CyWee Group Ltd', 'F88C1C': 'KAISHUN ELECTRONIC TECHNOLOGY CO., LTD. BEIJING', '1C0B52': 'EPICOM S.A', '747E2D': 'Beijing Thomson CITIC Digital Technology Co. LTD.', '885C47': 'Alcatel Lucent', '3CC1F6': 'Melange Systems Pvt. 
Ltd.', '94FAE8': 'Shenzhen Eycom Technology Co., Ltd ', 'B48255': 'Research Products Corporation', '8016B7': 'Brunel University', '008DDA': 'Link One Co., Ltd.', 'C49805': 'Minieum Networks, Inc', '90F4C1': 'Rand McNally', '18193F': 'Tamtron Oy', '944444': 'LG Innotek', '4C64D9': 'Guangdong Leawin Group Co., Ltd', '940149': 'AutoHotBox', '1CB094': 'HTC Corporation', '9C0111': 'Shenzhen Newabel Electronic Co., Ltd.', '400E67': 'Tremol Ltd.', 'C0DF77': 'Conrad Electronic SE', '3055ED': 'Trex Network LLC', 'F0F755': 'Cisco Systems, Inc', '1CB243': 'TDC A/S', '38BF33': 'NEC CASIO Mobile Communications', 'B467E9': 'Qingdao GoerTek Technology Co., Ltd.', '186751': 'KOMEG Industrielle Messtechnik GmbH', '645EBE': 'Yahoo! JAPAN', 'CCC50A': 'SHENZHEN DAJIAHAO TECHNOLOGY CO.,LTD', '1CB17F': 'NEC Platforms, Ltd.', 'E42C56': 'Lilee Systems, Ltd.', '48ED80': 'daesung eltec', 'C80718': 'TDSi', '581D91': 'Advanced Mobile Telecom co.,ltd.', 'D8BF4C': 'Victory Concept Electronics Limited', '3CB9A6': 'Belden Deutschland GmbH', '8C0CA3': 'Amper', '94DF58': 'IJ Electron CO.,Ltd.', '48D54C': 'Jeda Networks', '8CDE52': 'ISSC Technologies Corp.', '082522': 'ADVANSEE', '4C2F9D': 'ICM Controls', 'E467BA': 'Danish Interpretation Systems A/S', 'BCB852': 'Cybera, Inc.', 'C49300': '8Devices', '6CA682': 'EDAM information & communications', '28D576': 'Premier Wireless, Inc.', '70D6B6': 'Metrum Technologies', 'C0A0DE': 'Multi Touch Oy', '40BC8B': 'itelio GmbH', '9CA134': 'Nike, Inc.', '50FC30': 'Treehouse Labs', 'B89674': 'AllDSP GmbH & Co. KG', '48E1AF': 'Vity', '1CBBA8': 'OJSC Ufimskiy Zavod Promsvyaz', '006BA0': 'SHENZHEN UNIVERSAL INTELLISYS PTE LTD', 'A898C6': 'Shinbo Co., Ltd.', 'B4211D': 'Beijing GuangXin Technology Co., Ltd', '903CAE': 'Yunnan KSEC Digital Technology Co.,Ltd.', '70704C': 'Purple Communications, Inc', 'D89760': 'C2 Development, Inc.', 'F47ACC': 'SolidFire, Inc.', '905682': 'Lenbrook Industries Limited', 'F0DA7C': 'RLH INDUSTRIES,INC.', 'AC319D': 'Shenzhen TG-NET Botone Technology Co.,Ltd.', '20107A': 'Gemtek Technology Co., Ltd.', '78DDD6': 'c-scape', 'C09132': 'Patriot Memory', '90185E': 'Apex Tool Group GmbH & Co OHG', 'F8FE5C': 'Reciprocal Labs Corp', '6C9CED': 'Cisco Systems, Inc', '2486F4': 'Ctek, Inc.', 'C4237A': 'WhizNets Inc.', 'F4E6D7': 'Solar Power Technologies, Inc.', 'B87424': 'Viessmann Elektronik GmbH', 'B451F9': 'NB Software', '30168D': 'ProLon', 'E4AFA1': 'HES-SO', 'A887ED': 'ARC Wireless LLC', 'D4D249': 'Power Ethernet', '80427C': 'Adolf Tedsen GmbH & Co. 
KG', 'E0DADC': 'JVC KENWOOD Corporation', 'E843B6': 'QNAP Systems, Inc.', 'B89BC9': 'SMC Networks Inc', '409FC7': 'BAEKCHUN I&C Co., Ltd.', '00FC58': 'WebSilicon Ltd.', '983571': 'Sub10 Systems Ltd', '5404A6': 'ASUSTek COMPUTER INC.', '186D99': 'Adanis Inc.', 'A0E201': 'AVTrace Ltd.(China)', 'F0DEB9': 'ShangHai Y&Y Electronics Co., Ltd', '7CA61D': 'MHL, LLC', '9CF67D': 'Ricardo Prague, s.r.o.', 'F82F5B': 'eGauge Systems LLC', '98C845': 'PacketAccess', '989080': 'Linkpower Network System Inc Ltd.', 'F83376': 'Good Mind Innovation Co., Ltd.', '50F61A': 'Kunshan JADE Technologies co., Ltd.', '542018': 'Tely Labs', '581FEF': 'Tuttnaer LTD', '58BDA3': 'Nintendo Co., Ltd.', 'F8F25A': 'G-Lab GmbH', '307ECB': 'SFR', '68F125': 'Data Controls Inc.', 'BC764E': 'Rackspace US, Inc.', 'CCC8D7': 'CIAS Elettronica srl', '84D32A': 'IEEE 1905.1', '4C0289': 'LEX COMPUTECH CO., LTD', 'C0E54E': 'ARIES Embedded GmbH', 'F8C001': 'Juniper Networks', '187C81': 'Valeo Vision Systems', 'ACCC8E': 'Axis Communications AB', '8C94CF': 'Encell Technology, Inc.', '6CA780': 'Nokia Corporation', '3057AC': 'IRLAB LTD.', '842B50': 'Huria Co.,Ltd.', '48F7F1': 'Nokia', '8C8E76': 'taskit GmbH', 'A0133B': 'HiTi Digital, Inc.', '9C5711': 'Feitian Xunda(Beijing) Aeronautical Information Technology Co., Ltd.', '88F488': 'cellon communications technology(shenzhen)Co.,Ltd.', '448E12': 'DT Research, Inc.', 'B8BB6D': 'ENERES Co.,Ltd.', '14373B': 'PROCOM Systems', '1897FF': 'TechFaith Wireless Technology Limited', '4C5585': 'Hamilton Systems', 'ECEA03': 'DARFON LIGHTING CORP', '30F9ED': 'Sony Corporation', '582EFE': 'Lighting Science Group', 'CC60BB': 'Empower RF Systems', '7CDD20': 'IOXOS Technologies S.A.', 'ECF236': 'NEOMONTANA ELECTRONICS', '0418B6': 'Private', 'E4A5EF': 'TRON LINK ELECTRONICS CO., LTD.', '3071B2': 'Hangzhou Prevail Optoelectronic Equipment Co.,LTD.', 'DCCE41': 'FE GLOBAL HONG KONG LIMITED', 'FC6C31': 'LXinstruments GmbH', '705CAD': 'Konami Gaming Inc', '3C6F45': 'Fiberpro Inc.', '703187': 'ACX GmbH', '30E4DB': 'Cisco Systems, Inc', '88E0F3': 'Juniper Networks', '80971B': 'Altenergy Power System,Inc.', '587675': 'Beijing ECHO Technologies Co.,Ltd', '0C51F7': 'CHAUVIN ARNOUX', '0CFC83': 'Airoha Technology Corp.,', '007F28': 'Actiontec Electronics, Inc', '804731': 'Packet Design, Inc.', 'B09BD4': 'GNH Software India Private Limited', 'F08BFE': 'COSTEL.,CO.LTD', '3C26D5': 'Sotera Wireless', 'E84E06': 'EDUP INTERNATIONAL (HK) CO., LTD', '00077D': 'Cisco Systems, Inc', 'CCD9E9': 'SCR Engineers Ltd.', '34A709': 'Trevil srl', 'E0C922': 'Jireh Energy Tech., Ltd.', '905F8D': 'modas GmbH', '98293F': 'Fujian Start Computer Equipment Co.,Ltd', 'B45CA4': 'Thing-talk Wireless Communication Technologies Corporation Limited', '908D1D': 'GH Technologies', '444F5E': 'Pan Studios Co.,Ltd.', 'D0AFB6': 'Linktop Technology Co., LTD', '98EC65': 'Cosesy ApS', 'ACC935': 'Ness Corporation', '008D4E': 'CJSC NII STT', '98F8DB': 'Marini Impianti Industriali s.r.l.', 'E41289': 'topsystem Systemhaus GmbH', '58E808': 'AUTONICS CORPORATION', 'DC05ED': 'Nabtesco Corporation', '4C98EF': 'Zeo', '00A1DE': 'ShenZhen ShiHua Technology CO.,LTD', '806CBC': 'NET New Electronic Technology GmbH', '909060': 'RSI VIDEO TECHNOLOGIES', 'BC8199': 'BASIC Co.,Ltd.', 'DCA7D9': 'Compressor Controls Corp', '38FEC5': 'Ellips B.V.', 'C455A6': 'Cadac Holdings Ltd', '5C7757': 'Haivision Network Video', 'D4D898': 'Korea CNO Tech Co., Ltd', 'B4AA4D': 'Ensequence, Inc.', 'B83D4E': 'Shenzhen Cultraview Digital Technology Co.,Ltd Shanghai Branch', '5087B8': 'Nuvyyo Inc', 
'C0EAE4': 'Sonicwall', '0838A5': 'Funkwerk plettac electronic GmbH', 'CC1EFF': 'Metrological Group BV', '184E94': 'MESSOA TECHNOLOGIES INC.', '6C391D': 'Beijing ZhongHuaHun Network Information center', '80B32A': 'UK Grid Solutions Ltd', '405539': 'Cisco Systems, Inc', 'E0F211': 'Digitalwatt', 'F86971': 'Seibu Electric Co.,', '44AA27': 'udworks Co., Ltd.', 'E8F928': 'RFTECH SRL', '1C955D': 'I-LAX ELECTRONICS INC.', '60F59C': 'CRU-Dataport', 'B0A72A': 'Ensemble Designs, Inc.', '6400F1': 'Cisco Systems, Inc', 'B8F4D0': 'Herrmann Ultraschalltechnik GmbH & Co. Kg', '08ACA5': 'Benu Video, Inc.', '586D8F': 'Cisco-Linksys, LLC', '10E3C7': 'Seohwa Telecom', '7465D1': 'Atlinks', '040A83': 'Alcatel-Lucent', 'C45600': 'Galleon Embedded Computing', 'BC3E13': 'Accordance Systems Inc.', 'A81B18': 'XTS CORP', 'D0A311': 'Neuberger Gebäudeautomation GmbH', '041D10': 'Dream Ware Inc.', '801440': 'Sunlit System Technology Corp', '180B52': 'Nanotron Technologies GmbH', 'DC07C1': 'HangZhou QiYang Technology Co.,Ltd.', 'C0A26D': 'Abbott Point of Care', '00BB8E': 'HME Co., Ltd.', 'D82A7E': 'Nokia Corporation', '801F02': 'Edimax Technology Co. Ltd.', '58EECE': 'Icon Time Systems', '647FDA': 'TEKTELIC Communications Inc.', 'AC0613': 'Senselogix Ltd', '747818': 'Jurumani Solutions', 'E01F0A': 'Xslent Energy Technologies. LLC', '443719': '2 Save Energy Ltd', '84EA99': 'Vieworks', 'E00C7F': 'Nintendo Co., Ltd.', 'E48AD5': 'RF WINDOW CO., LTD.', 'FCF1CD': 'OPTEX-FA CO.,LTD.', '4425BB': 'Bamboo Entertainment Corporation', '7CDA84': 'Dongnian Networks Inc.', 'BCC61A': 'SPECTRA EMBEDDED SYSTEMS', '0470BC': 'Globalstar Inc.', '88DD79': 'Voltaire', '64F987': 'Avvasi Inc.', 'D85D84': 'CAx soft GmbH', 'D4E32C': 'S. Siedle & Sohne', '7C6ADB': 'SafeTone Technology Co.,Ltd', '902E87': 'LabJack', 'A424B3': 'FlatFrog Laboratories AB', '94CDAC': 'Creowave Oy', '144C1A': 'Max Communication GmbH', '340804': 'D-Link Corporation', 'F05D89': 'Dycon Limited', 'AC80D6': 'Hexatronic AB', '9CF938': 'AREVA NP GmbH', '8CDB25': 'ESG Solutions', 'FC3598': 'Favite Inc.', '90D92C': 'HUG-WITSCHI AG', '988E34': 'ZHEJIANG BOXSAM ELECTRONIC CO.,LTD', 'C471FE': 'Cisco Systems, Inc', '4022ED': 'Digital Projection Ltd', '989449': 'Skyworth Wireless Technology Ltd.', '2CA157': 'acromate, Inc.', '942053': 'Nokia Corporation', '28852D': 'Touch Networks', 'B8BA68': "Xi'an Jizhong Digital Communication Co.,Ltd", 'B0B32B': 'Slican Sp. 
z o.o.', '5842E4': 'Baxter International Inc', 'B8797E': 'Secure Meters (UK) Limited', 'CC9E00': 'Nintendo Co., Ltd.', '58A76F': 'iD corporation', '0006F6': 'Cisco Systems, Inc', '747DB6': 'Aliwei Communications, Inc', 'B06563': 'Shanghai Railway Communication Factory', '4018D7': 'Smartronix, Inc.', '4C2C80': 'Beijing Skyway Technologies Co.,Ltd ', 'D49E6D': 'Wuhan Zhongyuan Huadian Science & Technology Co.,', '6C2E33': 'Accelink Technologies Co.,Ltd.', '40987B': 'Aisino Corporation', 'BC38D2': 'Pandachip Limited', '2005E8': 'OOO InProMedia', 'F00248': 'SmarteBuilding', 'AC6F4F': 'Enspert Inc', 'E82877': 'TMY Co., Ltd.', 'F0C88C': 'LeddarTech Inc.', '4037AD': 'Macro Image Technology, Inc.', '28ED58': 'JAG Jakob AG', '486B91': 'Fleetwood Group Inc.', '643409': 'BITwave Pte Ltd', '40C245': 'Shenzhen Hexicom Technology Co., Ltd.', 'CC55AD': 'RIM', 'E8757F': 'FIRS Technologies(Shenzhen) Co., Ltd', 'F0F7B3': 'Phorm', '00D38D': 'Hotel Technology Next Generation', 'C83EA7': 'KUNBUS GmbH', '60893C': 'Thermo Fisher Scientific P.O.A.', 'D86BF7': 'Nintendo Co., Ltd.', '74CD0C': 'Smith Myers Communications Ltd.', 'CCCE40': 'Janteq Corp', 'B8EE79': 'YWire Technologies, Inc.', '74D675': 'WYMA Tecnologia', 'B40EDC': 'LG-Ericsson Co.,Ltd.', 'E0EE1B': 'Panasonic Automotive Systems Company of America', '74BE08': 'ATEK Products, LLC', '6063FD': 'Transcend Communication Beijing Co.,Ltd.', 'D8B6C1': 'NetworkAccountant, Inc.', '74A4A7': 'QRS Music Technologies, Inc.', '700258': '01DB-METRAVIB', 'F455E0': 'Niceway CNC Technology Co.,Ltd.Hunan Province', '20FDF1': '3COM EUROPE LTD', '8497B8': 'Memjet Inc.', 'A0DDE5': 'SHARP Corporation', '206AFF': 'Atlas Elektronik UK Limited', 'AC34CB': 'Shanhai GBCOM Communication Technology Co. Ltd', '9088A2': 'IONICS TECHNOLOGY ME LTDA', '40CD3A': 'Z3 Technology', '482CEA': 'Motorola Inc Business Light Radios', '00336C': 'SynapSense Corporation', '2CD1DA': 'Sanjole, Inc.', 'F866F2': 'Cisco Systems, Inc', 'AC6123': 'Drivven, Inc.', '100C24': 'pomdevices, LLC', '58F6BF': 'Kyoto University', '78EC22': 'Shanghai Qihui Telecom Technology Co., LTD', '0C15C5': 'SDTEC Co., Ltd.', 'FC75E6': 'Handreamnet', 'A0A763': 'Polytron Vertrieb GmbH', 'EC2368': 'IntelliVoice Co.,Ltd.', '749050': 'Renesas Electronics Corporation', '389592': 'Beijing Tendyron Corporation', 'A4218A': 'Nortel Networks', 'F8FB2F': 'Santur Corporation', '2CCD43': 'Summit Technology Group', '64995D': 'LGE ', '7CEF18': 'Creative Product Design Pty. Ltd.', 'FC7CE7': 'FCI USA LLC', '145412': 'Entis Co., Ltd.', '7C3E9D': 'PATECH', '244597': 'GEMUE Gebr. Mueller Apparatebau', '78818F': 'Server Racks Australia Pty Ltd', '284846': 'GridCentric Inc.', '30694B': 'RIM', '6C626D': "Micro-Star INT'L CO., LTD", '28068D': 'ITL, LLC', 'C00D7E': 'Additech, Inc.', '84C7A9': 'C3PO S.A.', 'D87157': 'Lenovo Mobile Communication Technology Ltd.', '609AA4': 'GVI SECURITY INC.', '641084': 'HEXIUM Technical Development Co., Ltd.', '342109': 'Jensen Scandinavia AS', '3C106F': 'ALBAHITH TECHNOLOGIES ', '0CDDEF': 'Nokia Corporation', 'ECFE7E': 'BlueRadios, Inc.', 'E4AD7D': 'SCL Elements', 'F09CBB': 'RaonThink Inc.', '10CCDB': 'AXIMUM PRODUITS ELECTRONIQUES', '38C7BA': 'CS Services Co.,Ltd.', 'EC5C69': 'MITSUBISHI HEAVY INDUSTRIES MECHATRONICS SYSTEMS,LTD.', '6C92BF': 'Inspur Electronic Information Industry Co.,Ltd.', '44A8C2': 'SEWOO TECH CO., LTD', 'AC9A96': 'Lantiq Deutschland GmbH', '80EE73': 'Shuttle Inc.', 'FCE23F': 'CLAY PAKY SPA', '58570D': 'Danfoss Solar Inverters', 'C4823F': 'Fujian Newland Auto-ID Tech. 
Co,.Ltd.', 'E85B5B': 'LG ELECTRONICS INC', 'BCA9D6': 'Cyber-Rain, Inc.', '6C3E9C': 'KE Knestel Elektronik GmbH', '8CD628': 'Ikor Metering', '243C20': 'Dynamode Group', '3C39C3': 'JW Electronics Co., Ltd.', '3C05AB': 'Product Creation Studio', 'F04335': 'DVN(Shanghai)Ltd.', 'A479E4': 'KLINFO Corp', '481BD2': 'Intron Scientific co., ltd.', 'D0F0DB': 'Ericsson', '7C1476': 'Damall Technologies SAS', '8C541D': 'LGE ', '00A2DA': 'INAT GmbH', '003CC5': 'WONWOO Engineering Co., Ltd', 'F077D0': 'Xcellen', '4CC602': 'Radios, Inc.', '80711F': 'Juniper Networks', '88FD15': 'LINEEYE CO., LTD', '884B39': 'Siemens AG, Healthcare Sector', 'D828C9': 'General Electric Consumer and Industrial', '44C233': 'Guangzhou Comet Technology Development Co.Ltd', '30EFD1': 'Alstom Strongwish (Shenzhen) Co., Ltd.', 'D41F0C': 'JAI Oy', '7C2CF3': 'Secure Electrans Ltd', '081651': 'SHENZHEN SEA STAR TECHNOLOGY CO.,LTD', 'A863DF': 'DISPLAIRE CORPORATION', '183BD2': 'BYD Precision Manufacture Company Ltd.', 'E43593': 'Hangzhou GoTo technology Co.Ltd', '2C3A28': 'Fagor Electrónica', 'B45861': 'CRemote, LLC', 'B0973A': 'E-Fuel Corporation', '204E6B': 'Axxana(israel) ltd', '80F593': 'IRCO Sistemas de Telecomunicación S.A.', 'E497F0': 'Shanghai VLC Technologies Ltd. Co.', 'B40832': 'TC Communications', 'ECDE3D': 'Lamprey Networks, Inc.', '6CFFBE': 'MPB Communications Inc.', '304174': 'ALTEC LANSING LLC', '80B289': 'Forworld Electronics Ltd.', 'E83A97': 'Toshiba Corporation', '1056CA': 'Peplink International Ltd.', '486FD2': 'StorSimple Inc', 'A03A75': 'PSS Belgium N.V.', '24DBAD': 'ShopperTrak RCT Corporation', '9CEBE8': 'BizLink (Kunshan) Co.,Ltd', '040EC2': 'ViewSonic Mobile China Limited', 'C8D1D1': 'AGAiT Technology Corporation', '00DB45': 'THAMWAY CO.,LTD.', 'D49C28': 'JayBird LLC', '74F726': 'Neuron Robotics', '2872C5': 'Smartmatic Corp', 'E08FEC': 'REPOTEC CO., LTD.', 'ACE9AA': 'Hay Systems Ltd', '082AD0': 'SRD Innovations Inc.', '889821': 'TERAON', 'E0E751': 'Nintendo Co., Ltd.', '003AAF': 'BlueBit Ltd.', '64168D': 'Cisco Systems, Inc', '003A9C': 'Cisco Systems, Inc', '7C6C8F': 'AMS NEVE LTD', '9CB206': 'PROCENTEC', '88ED1C': 'Cudo Communication Co., Ltd.', '9CCD82': 'CHENG UEI PRECISION INDUSTRY CO.,LTD', 'F06281': 'ProCurve Networking by HP', 'C09C92': 'COBY', 'C038F9': 'Nokia Danmark A/S', 'F46349': 'Diffon Corporation', '74F07D': 'BnCOM Co.,Ltd', 'F852DF': 'VNL Europe AB', 'A8CB95': 'EAST BEST CO., LTD.', 'F45FF7': 'DQ Technology Inc.', '7C3BD5': 'Imago Group', '5CE223': 'Delphin Technology AG', 'F871FE': 'The Goldman Sachs Group, Inc.', '2C1984': 'IDN Telecom, Inc.', 'D8C3FB': 'DETRACOM', '58F67B': 'Xia Men UnionCore Technology LTD.', '6CF049': 'GIGA-BYTE TECHNOLOGY CO.,LTD.', '644F74': 'LENUS Co., Ltd.', '787F62': 'GiK mbH', '401597': 'Protect America, Inc.', 'C4FCE4': 'DishTV NZ Ltd', 'E80B13': 'Akib Systems Taiwan, INC', 'EC6C9F': 'Chengdu Volans Technology CO.,LTD', '40EF4C': 'Fihonest communication co.,Ltd', '00271E': 'Xagyl Communications', '00271D': 'Comba Telecom Systems (China) Ltd.', '002720': 'NEW-SOL COM', '0026F0': 'cTrixs International GmbH.', '0026EA': 'Cheerchip Electronic Technology (ShangHai) Co., Ltd.', '002708': 'Nordiag ASA', '002702': 'SolarEdge Technologies', '002704': 'Accelerated Concepts, Inc', '0026FA': 'BandRich Inc.', '0026F9': 'S.E.M. srl', '0026FD': 'Interactive Intelligence', '0026F7': 'Nivetti Systems Pvt. 
Ltd.', '0026F6': 'Military Communication Institute', '0026DD': 'Fival Science & Technology Co.,Ltd.', '0026DE': 'FDI MATELEC', '0026DA': 'Universal Media Corporation /Slovakia/ s.r.o.', '0026DB': 'Ionics EMS Inc.', '0026D5': 'Ory Solucoes em Comercio de Informatica Ltda.', '0026CE': 'Kozumi USA Corp.', '002711': 'LanPro Inc', '002686': 'Quantenna Communcations, Inc.', '002684': 'KISAN SYSTEM', '002680': 'SIL3 Pty.Ltd', '0026BF': 'ShenZhen Temobi Science&Tech Development Co.,Ltd', '0026B4': 'Ford Motor Company', '0026CA': 'Cisco Systems, Inc', '0026C9': 'Proventix Systems, Inc.', '002690': 'I DO IT', '00268F': 'MTA SpA', '002679': 'Euphonic Technologies, Inc.', '0026AC': 'Shanghai LUSTER Teraband photonic Co., Ltd.', '0026A6': 'TRIXELL', '00269C': 'ITUS JAPAN CO. LTD', '002694': 'Senscient Ltd', '002676': 'COMMidt AS', '00261D': 'COP SECURITY SYSTEM CORP.', '002617': 'OEM Worldwide', '002613': 'Engel Axil S.L.', '002638': 'Xia Men Joyatech Co., Ltd.', '00263A': 'Digitec Systems', '00260F': 'Linn Products Ltd', '00260C': 'Dataram', '00262B': 'Wongs Electronics Co. Ltd.', '002620': 'ISGUS GmbH', '002601': 'Cutera Inc', '002635': 'Bluetechnix GmbH', '002657': 'OOO NPP EKRA', '002652': 'Cisco Systems, Inc', '002666': 'EFM Networks', '0025F5': 'DVS Korea, Co., Ltd', '0025EB': 'Reutech Radar Systems (PTY) Ltd', '0025EE': 'Avtex Ltd', '0025AE': 'Microsoft Corporation', '0025AF': 'COMFILE Technology', '0025A8': 'Kontron (BeiJing) Technology Co.,Ltd', '0025D9': 'DataFab Systems Inc.', '0025D6': 'The Kroger Co.', '0025D1': 'Eastern Asia Technology Limited', '0025CD': 'Skylane Optics', '0025BF': 'Wireless Cables Inc.', '0025B9': 'Cypress Solutions Inc', '0025B6': 'Telecom FM', '0025A5': 'Walnut Media Network', '0025A4': 'EuroDesign embedded technologies GmbH', '0025C1': 'Nawoo Korea Corp.', '0025F6': 'netTALK.com, Inc.', '00257A': 'CAMCO Produktions- und Vertriebs-GmbH für Beschallungs- und Beleuchtungsanlagen', '002576': 'NELI TECHNOLOGIES', '002574': 'KUNIMI MEDIA DEVICE Co., Ltd.', '002559': 'Syphan Technologies Ltd', '002554': 'Pixel8 Networks', '002540': 'Quasar Technologies, Inc.', '002533': 'WITTENSTEIN AG', '002530': 'Aetas Systems Inc.', '00252C': 'Entourage Systems, Inc.', '00258C': 'ESUS ELEKTRONIK SAN. VE DIS. TIC. LTD. STI.', '00255A': 'Tantalus Systems Corp.', '002587': 'Vitality, Inc.', '002573': 'ST Electronics (Info-Security) Pte Ltd', '00256F': 'Dantherm Power', '00252F': 'Energy, Inc.', '00250A': 'Security Expert Co. Ltd', '002505': 'eks Engel GmbH & Co. KG', '002509': 'SHARETRONIC Group LTD', '0024DA': 'Innovar Systems Limited', '0024D8': 'IlSung Precision', '0024CD': 'Willow Garage, Inc.', '0024D3': 'QUALICA Inc.', '0024CE': 'Exeltech Inc', '0024CF': 'Inscape Data Corporation', '0024C6': 'Hager Electro SAS', '0024FD': 'Accedian Networks Inc', '002523': 'OCP Inc.', '00251D': 'DSA Encore, LLC', '00250F': 'On-Ramp Wireless, Inc.', '0024F5': 'NDS Surgical Imaging', '002467': 'AOC International (Europe) GmbH', '00246D': 'Weinzierl Engineering GmbH', '002470': 'AUROTECH ultrasound AS.', '00246A': 'Solid Year Co., Ltd.', '002492': 'Motorola, Broadband Solutions Group', '00248B': 'HYBUS CO., LTD.', '002484': 'Bang and Olufsen Medicom a/s', '002480': 'Meteocontrol GmbH', '00247A': 'FU YI CHENG Technology Co., Ltd.', '002476': 'TAP.tv', '0024B4': 'ESCATRONIC GmbH', '0024B1': 'Coulomb Technologies', '00249C': 'Bimeng Comunication System Co. 
Ltd', '002498': 'Cisco Systems, Inc', '002452': 'Silicon Software GmbH', '002453': 'Initra d.o.o.', '00244C': 'Solartron Metrology Ltd', '002448': 'SpiderCloud Wireless, Inc', '00244B': 'PERCEPTRON INC', '002428': 'EnergyICT', '002417': 'Thomson Telecom Belgium', '002416': 'Any Use', '00242A': 'Hittite Microwave Corporation', '002422': 'Knapp Logistik Automation GmbH', '00241B': 'iWOW Communications Pte Ltd', '00245E': 'Hivision Co.,ltd', '00244D': 'Hokkaido Electronics Corporation', '002442': 'Axona Limited', '002400': 'Nortel Networks', '0023FB': 'IP Datatel, LLC.', '002409': 'The Toro Company', '002406': 'Pointmobile', '00243C': 'S.A.A.A.', '0023D3': 'AirLink WiFi Networking Corp.', '0023CA': 'Behind The Set, LLC', '0023CB': 'Shenzhen Full-join Technology Co.,Ltd', '0023AD': 'Xmark Corporation', '0023AB': 'Cisco Systems, Inc', '0023A4': 'New Concepts Development Corp.', '0023DC': 'Benein, Inc', '0023D1': 'TRG', '0023FE': 'Biodevices, SA', '0023F4': 'Masternaut', '0023D0': 'Uniloc USA Inc.', '0023C7': 'AVSystem', '0023C3': 'LogMeIn, Inc.', '0023B0': 'COMXION Technology Inc.', '0023E1': 'Cavena Image Products AB', '00237E': 'ELSTER GMBH', '00237C': 'NEOTION', '00237A': 'RIM', '00236D': 'ResMed Ltd', '00236B': 'Xembedded, Inc.', '002361': 'Unigen Corporation', '00232C': 'Senticare', '00232B': 'IRD A/S', '002336': 'METEL s.r.o.', '002338': 'OJ-Electronics A/S', '00233B': 'C-Matic Systems Ltd', '00232A': 'eonas IT-Beratung und -Entwicklung GmbH', '002327': 'Shouyo Electronics CO., LTD', '002328': 'ALCON TELECOMMUNICATIONS CO., LTD.', '002372': 'MORE STAR INDUSTRIAL GROUP LIMITED', '00234C': 'KTC AB', '002343': 'TEM AG', '00235F': 'Silicon Micro Sensors GmbH', '002385': 'ANTIPODE', '0022D8': 'Shenzhen GST Security and Safety Technology Limited', '0022DC': 'Vigil Health Solutions Inc.', '0022D9': 'Fortex Industrial Ltd.', '0022D3': 'Hub-Tech', '0022D4': 'ComWorth Co., Ltd.', '0022FE': 'Advanced Illumination', '002300': 'Cayee Computer Ltd.', '0022F3': 'SHARP Corporation', '0022F6': 'Syracuse Research Corporation', '00230D': 'Nortel Networks', '002303': 'LITE-ON IT Corporation', '0022ED': 'TSI Power Corporation', '0022E1': 'ZORT Labs, LLC.', '0022E0': 'Atlantic Software Technologies S.r.L.', '0022DF': 'TAMUZ Monitors', '00231A': 'ITF Co., Ltd.', '0022BC': 'JDSU France SAS', '0022AA': 'Nintendo Co., Ltd.', '0022C9': 'Lenord, Bauer & Co GmbH', '00228C': 'Photon Europe GmbH', '00228B': 'Kensington Computer Products Group', '00228D': 'GBS Laboratories LLC', '00224C': 'Nintendo Co., Ltd.', '00224A': 'RAYLASE AG', '00224B': 'AIRTECH TECHNOLOGIES, INC.', '002256': 'Cisco Systems, Inc', '002252': 'ZOLL Lifecor Corporation', '00224D': 'MITAC INTERNATIONAL CORP.', '00229F': 'Sensys Traffic AB', '002299': 'SeaMicro Inc.', '00226D': 'Shenzhen GIEC Electronics Co., Ltd.', '00226E': 'Gowell Electronic Limited', '00225D': 'Digicable Network India Pvt. 
Ltd.', '00227B': 'Apogee Labs, Inc.', '002289': 'Vandelrande APC inc.', '00227D': 'YE DATA INC.', '0022A8': 'Ouman Oy', '002223': 'TimeKeeping Systems, Inc.', '00221A': 'Audio Precision', '002218': 'AKAMAI TECHNOLOGIES INC', '00223C': 'RATIO Entwicklungen GmbH', '0021E7': 'Informatics Services Corporation', '0021DC': 'TECNOALARM S.r.l.', '0021F1': 'Tutus Data AB', '0021E3': 'SerialTek LLC', '00221C': 'Private', '002222': 'Schaffner Deutschland GmbH', '0021F8': 'Enseo, Inc.', '0021F3': 'Si14 SpA', '002231': 'SMT&C Co., Ltd.', '002213': 'PCI CORPORATION', '002201': 'Aksys Networks Inc', '00217B': 'Bastec AB', '002176': 'YMax Telecom Ltd.', '002171': 'Wesung TNC Co., Ltd.', '002193': 'Videofon MV', '002192': 'Baoding Galaxy Electronic Technology Co.,Ltd', '0021A7': 'Hantle System Co., Ltd.', '00219C': 'Honeywld Technology Corp.', '00219A': 'Cambridge Visual Networks Ltd', '0021D8': 'Cisco Systems, Inc', '0021D7': 'Cisco Systems, Inc', '0021D9': 'SEKONIC CORPORATION', '0021DA': 'Automation Products Group Inc.', '0021B5': 'Galvanic Ltd', '0021BD': 'Nintendo Co., Ltd.', '0021C4': 'Consilium AB', '002189': 'AppTech, Inc.', '002122': 'Chip-pro Ltd.', '002125': 'KUK JE TONG SHIN Co.,LTD', '002126': 'Shenzhen Torch Equipment Co., Ltd.', '002113': 'Padtec S/A', '002112': 'WISCOM SYSTEM CO.,LTD', '00210E': 'Orpak Systems L.T.D.', '002161': 'Yournet Inc.', '00215F': 'IHSE GmbH', '002135': 'ALCATEL-LUCENT', '002138': 'Cepheid', '002147': 'Nintendo Co., Ltd.', '002149': 'China Daheng Group ,Inc.', '002156': 'Cisco Systems, Inc', '002151': 'Millinet Co., Ltd.', '00216C': 'ODVA', '00211C': 'Cisco Systems, Inc', '002118': 'Athena Tech, Inc.', '001FBD': 'Kyocera Wireless Corp.', '001FB9': 'Paltronics', '001FB7': 'WiMate Technologies Corp.', '001FB4': 'SmartShare Systems', '001FB6': 'Chi Lin Technology Co., Ltd.', '001FC8': 'Up-Today Industrial Co., Ltd.', '001FC5': 'Nintendo Co., Ltd.', '001FC0': 'Control Express Finland Oy', '001FBC': 'EVGA Corporation', '001FF9': 'Advanced Knowledge Associates', '001FED': 'Tecan Systems Inc.', '001FE5': 'In-Circuit GmbH', '001FF4': 'Power Monitors, Inc.', '001F5F': 'Blatand GmbH', '001F57': 'Phonik Innovation Co.,LTD', '001F59': 'Kronback Tracers', '001F4E': 'ConMed Linvatec', '001FAA': 'Taseon, Inc.', '001F7B': 'TechNexion Ltd.', '001F7D': 'Embedded Wireless GmbH', '001F8D': 'Ingenieurbuero Stark GmbH und Ko. 
KG', '001FAF': 'NextIO, Inc.', '001F74': 'Eigen Development', '001F75': 'GiBahn Media', '001F8F': 'Shanghai Bellmann Digital Source Co.,Ltd.', '001F84': 'Gigle Semiconductor', '001F6E': 'Vtech Engineering Corporation', '001F66': 'PLANAR LLC', '001FA0': 'A10 Networks', '001F9E': 'Cisco Systems, Inc', '001EE6': 'Shenzhen Advanced Video Info-Tech Co., Ltd.', '001EF7': 'Cisco Systems, Inc', '001F0B': 'Federal State Unitary Enterprise Industrial UnionElectropribor', '001F0C': 'Intelligent Digital Services GmbH', '001F08': 'RISCO LTD', '001F43': 'ENTES ELEKTRONIK', '001F14': 'NexG', '001EF1': 'Servimat', '001EF4': 'L-3 Communications Display Systems', '001EF5': 'Hitek Automated Inc.', '001F0E': 'Japan Kyastem Co., Ltd', '001F28': 'HPN Supply Chain', '001F1E': 'Astec Technology Co., Ltd', '001F4A': 'Albentia Systems S.A.', '001EAA': 'E-Senza Technologies GmbH', '001E9C': 'Fidustron INC', '001E97': 'Medium Link System Technology CO., LTD,', '001E8B': 'Infra Access Korea Co., Ltd.', '001EBB': 'BLUELIGHT TECHNOLOGY INC.', '001EB5': 'Ever Sparkle Technologies Ltd', '001EB3': 'Primex Wireless', '001E85': 'Lagotek Corporation', '001ED8': 'Digital United Inc.', '001ED6': 'Alentec & Orion AB', '001EC6': 'Obvius Holdings LLC', '001EC4': 'Celio Corp', '001EC1': '3COM EUROPE LTD', '001EB6': 'TAG Heuer SA', '001EAC': 'Armadeus Systems', '001E77': 'Air2App', '001E7B': 'R.I.CO. S.r.l.', '001E6E': 'Shenzhen First Mile Communications Ltd', '001E6D': 'IT R&D Center', '001E34': 'CryptoMetrics', '001E2D': 'STIM', '001E41': 'Microwave Communication & Component, Inc.', '001DFC': 'KSIC', '001E55': 'COWON SYSTEMS,Inc.', '001E56': 'Bally Wulff Entertainment GmbH', '001E3F': 'TrellisWare Technologies, Inc.', '001E57': 'ALCOMA, spol. s r.o.', '001E50': 'BATTISTONI RESEARCH', '001E11': 'ELELUX INTERNATIONAL LTD', '001DF2': 'Netflix, Inc.', '001DEF': 'TRIMM, INC.', '001DF1': 'Intego Systems, Inc.', '001DED': 'Grid Net, Inc.', '001DC3': 'RIKOR TV, Ltd', '001DC1': 'Audinate Pty L', '001DB2': 'HOKKAIDO ELECTRIC ENGINEERING CO.,LTD.', '001DAD': 'Sinotech Engineering Consultants, Inc. Geotechnical Enginee', '001DAB': 'SwissQual License AG', '001D9C': 'Rockwell Automation', '001DA0': 'Heng Yu Electronic Manufacturing Company Limited', '001DD8': 'Microsoft Corporation', '001DC8': 'Navionics Research Inc., dba SCADAmetrics', '001DCC': 'Ayon Cyber Security, Inc', '001D92': "MICRO-STAR INT'L CO.,LTD.", '001D8A': 'TechTrex Inc', '001D80': 'Beijing Huahuan Eletronics Co.,Ltd', '001D83': 'Emitech Corporation', '001D74': 'Tianjin China-Silicon Microelectronics Co., Ltd.', '001D4B': 'Grid Connect Inc.', '001D4D': 'Adaptive Recognition Hungary, Inc', '001D1E': 'KYUSHU TEN CO.,LTD', '001D1D': 'Inter-M Corporation', '001D0E': 'Agapha Technology co., Ltd.', '001D0A': 'Davis Instruments, Inc.', '001D67': 'AMEC', '001D7C': 'ABE Elettronica S.p.A.', '001D6D': 'Confidant International LLC', '001D75': 'Radioscape PLC', '001D53': 'S&O Electronics (Malaysia) Sdn. 
Bhd.', '001D54': 'Sunnic Technology & Merchandise INC.', '001D5F': 'OverSpeed SARL', '001D58': 'CQ Inc', '001D87': 'VigTech Labs Sdn Bhd', '001D2F': 'QuantumVision Corporation', '001CE5': 'MBS Electronic Systems GmbH', '001CDD': 'COWBELL ENGINEERING CO., LTD.', '001D05': 'Eaton Corporation', '001CC2': 'Part II Research, Inc.', '001CBD': 'Ezze Mobile Tech., Inc.', '001CB8': 'CBC Co., Ltd', '001CAC': 'Qniq Technology Corp.', '001CA5': 'Zygo Corporation', '001C9D': 'Liecthi AG', '001CFF': 'Napera Networks Inc', '001CDC': 'Custom Computer Services, Inc.', '001CD1': 'Waves Audio LTD', '001CED': 'ENVIRONNEMENT SA', '001CBA': 'VerScient, Inc.', '001CB0': 'Cisco Systems, Inc', '001C4C': 'Petrotest Instruments', '001C45': 'Chenbro Micom Co., Ltd.', '001C3C': 'Seon Design Inc.', '001C57': 'Cisco Systems, Inc', '001C5D': 'Leica Microsystems', '001C5E': 'ASTON France', '001C55': 'Shenzhen Kaifa Technology Co.', '001C5B': 'Chubb Electronic Security Systems Ltd', '001C44': 'Bosch Security Systems BV', '001C82': 'Genew Technologies', '001C84': 'STL Solution Co.,Ltd.', '001C79': 'Cohesive Financial Technologies LLC', '001C64': 'Landis+Gyr', '001C31': 'Mobile XP Technology Co., LTD', '001C8B': 'MJ Innovations Ltd.', '001C6D': 'KYOHRITSU ELECTRONIC INDUSTRY CO., LTD.', '001C96': 'Linkwise Technology Pte Ltd', '001C29': 'CORE DIGITAL ELECTRONICS CO., LTD', '001C24': 'Formosa Wireless Systems Corp.', '001C20': 'CLB Benelux', '001C1C': 'Center Communication Systems GmbH', '001BDA': 'UTStarcom Inc', '001BD1': 'SOGESTMATIC', '001C00': 'Entry Point, LLC', '001BFD': 'Dignsys Inc.', '001BD6': 'Kelvin Hughes Ltd', '001BCF': 'Dataupia Corporation', '001BCB': 'PEMPEK SYSTEMS PTY LTD', '001BF5': 'Tellink Sistemas de Telecomunicación S.L.', '001BE6': 'VR AG', '001BE2': 'AhnLab,Inc.', '001BE0': 'TELENOT ELECTRONIC GmbH', '001C34': 'HUEY CHIAO INTERNATIONAL CO., LTD.', '001C36': 'iNEWiT NV', '001C15': 'iPhotonix LLC', '001C07': 'Cwlinux Limited', '001BA3': 'Flexit Group GmbH', '001B9F': 'Calyptech Pty Ltd', '001B9A': 'Apollo Fire Detectors Ltd', '001BBD': 'FMC Kongsberg Subsea AS', '001BBE': 'ICOP Digital', '001BB3': 'Condalo GmbH', '001BB7': 'Alta Heights Technology Corp.', '001B99': 'KS System GmbH', '001B8F': 'Cisco Systems, Inc', '001B8C': 'JMicron Technology Corp.', '001B91': 'EFKON AG', '001B82': 'Taiwan Semiconductor Co., Ltd.', '001B85': 'MAN Diesel SE', '001B89': 'EMZA Visual Sense Ltd.', '001B8B': 'NEC Platforms, Ltd.', '001BAC': 'Curtiss Wright Controls Embedded Computing', '001B6A': 'Powerwave Technologies Sweden AB', '001B67': 'Cisco Systems Inc', '001B7B': 'The Tintometer Ltd', '001B75': 'Hypermedia Systems', '001B27': 'Merlin CSI', '001B1B': 'Siemens AG,', '001B1F': 'DELTA - Danish Electronics, Light & Acoustics', '001B57': 'SEMINDIA SYSTEMS PRIVATE LIMITED', '001B55': 'Hurco Automation Ltd.', '001B53': 'Cisco Systems, Inc', '001B49': 'Roberts Radio limited', '001B0E': 'InoTec GmbH Organisationssysteme', '001B04': 'Affinity International S.p.a', '001B06': 'Ateliers R. LAUMONIER', '001B31': 'Neural Image. Co. Ltd.', '001B29': 'Avantis.Co.,Ltd', '001B2B': 'Cisco Systems, Inc', '001B28': 'POLYGON, JSC', '001B43': 'Beijing DG Telecommunications equipment Co.,Ltd', '001B3C': 'Software Technologies Group,Inc.', '001B3D': 'EuroTel Spa', '001B66': 'Sennheiser electronic GmbH & Co. 
KG', '001B60': 'NAVIGON AG', '001B05': 'YMC AG', '001AFF': 'Wizyoung Tech.', '001B00': 'Neopost Technologies', '001B4E': 'Navman New Zealand', '001B16': 'Celtro Ltd.', '001AE6': 'Atlanta Advanced Communications Holdings Limited', '001AD9': 'International Broadband Electric Communications, Inc.', '001AC8': 'ISL (Instrumentation Scientifique de Laboratoire)', '001AAB': 'eWings s.r.l.', '001AAC': 'Corelatus AB', '001AAF': 'BLUSENS TECHNOLOGY', '001AB0': 'Signal Networks Pvt. Ltd.,', '001AA1': 'Cisco Systems, Inc', '001AC6': 'Micro Control Designs', '001AF2': 'Dynavisions Schweiz AG', '001A82': 'PROBA Building Automation Co.,LTD', '001A7C': 'Hirschmann Multimedia B.V.', '001A7A': 'Lismore Instruments Limited', '001A78': 'ubtos', '001A76': 'SDT information Technology Co.,LTD.', '001A70': 'Cisco-Linksys, LLC', '001A61': 'PacStar Corp.', '001A62': 'Data Robotics, Incorporated', '001A65': 'Seluxit', '001A54': 'Hip Shing Electronics Ltd.', '001A42': 'Techcity Technology co., Ltd.', '001A50': 'PheeNet Technology Corp.', '001A53': 'Zylaya', '001A4C': 'Crossbow Technology, Inc', '001A1A': 'Gentex Corporation/Electro-Acoustic Products', '001A12': 'Essilor', '001A7D': 'cyber-blue(HK)Ltd', '001A60': 'Wave Electronics Co.,Ltd.', '001A56': 'ViewTel Co,. Ltd.', '0019F3': 'Cetis, Inc', '0019F5': 'Imagination Technologies Ltd', '0019EF': 'SHENZHEN LINNKING ELECTRONICS CO.,LTD', '0019F7': 'Onset Computer Corporation', '0019EE': 'CARLO GAVAZZI CONTROLS SPA-Controls Division', '0019BF': 'Citiway technology Co.,ltd', '0019B6': 'Euro Emme s.r.l.', '0019EB': 'Pyronix Ltd', '0019E7': 'Cisco Systems, Inc', '0019E9': 'S-Information Technolgy, Co., Ltd.', '0019DA': 'Welltrans O&E Technology Co. , Ltd.', '001A14': 'Xin Hua Control Engineering Co.,Ltd.', '001A10': 'LUCENT TRANS ELECTRONICS CO.,LTD', '001A0C': 'Swe-Dish Satellite Systems AB', '0019DC': 'ENENSYS Technologies', '0019D0': 'Cathexis', '0019D6': 'LS Cable and System Ltd.', '0019D7': 'FORTUNETEK CO., LTD', '001A02': 'SECURE CARE PRODUCTS, INC', '0019F8': 'Embedded Systems Design, Inc.', '001A07': 'Arecont Vision', '001A08': 'Simoco Ltd.', '001A04': 'Interay Solutions BV', '0019C9': 'S&C ELECTRIC COMPANY', '0019B5': 'Famar Fueguina S.A.', '0019B8': 'Boundary Devices', '00195D': 'ShenZhen XinHuaTong Opto Electronics Co.,Ltd', '001953': 'Chainleader Communications Corp.', '001955': 'Cisco Systems, Inc', '001959': 'Staccato Communications Inc.', '00194D': 'Avago Technologies Sdn Bhd', '00194E': 'Ultra Electronics - TCS (Tactical Communication Systems)', '0019AC': 'GSP SYSTEMS Inc.', '0019B0': 'HanYang System', '001995': 'Jurong Hi-Tech (Suzhou)Co.ltd', '00199A': 'EDO-EVI', '001994': 'Jorjin Technologies Inc.', '00197C': 'Riedel Communications GmbH', '0019A0': 'NIHON DATA SYSTENS, INC.', '001991': 'avinfo', '00198C': 'iXSea', '001962': 'Commerciant, LP', '00196F': 'SensoPart GmbH', '0018DD': 'Silicondust Engineering Ltd', '0018DF': 'The Morey Corporation', '0018E1': 'Verkerk Service Systemen', '0018DA': 'Würth Elektronik eiSos GmbH & Co. KG', '0018D5': 'REIGNCOM', '0018E7': 'Cameo Communications, INC.', '0018E4': 'YIGUANG', '0018E5': 'Adhoco AG', '00190A': 'HASWARE INC.', '001923': 'Phonex Korea Co., LTD.', '00191C': 'Sensicast Systems', '001928': 'Siemens AG, Transportation Systems', '001915': 'TECOM Co., Ltd.', '00191B': 'Sputnik Engineering AG', '001909': 'DEVI - Danfoss A/S', '001933': 'Strix Systems, Inc.', '001904': 'WB Electronics Sp. 
z o.o.', '001934': 'TRENDON TOUCH TECHNOLOGY CORP.', '00190D': 'IEEE 1394c', '001881': 'Buyang Electronics Industrial Co., Ltd', '001876': 'WowWee Ltd.', '00187A': 'Wiremold', '00186C': 'Neonode AB', '001873': 'Cisco Systems, Inc', '00185F': 'TAC Inc.', '0018D4': 'Unified Display Interface SIG', '0018A7': 'Yoggie Security Systems LTD.', '0018A2': 'XIP Technology AB', '00189C': 'Weldex Corporation', '00189A': 'HANA Micron Inc.', '001864': 'Eaton Corporation', '001866': 'Leutron Vision', '001860': 'SIM Technology Group Shanghai Simcom Ltd.,', '001897': 'JESS-LINK PRODUCTS Co., LTD', '00189E': 'OMNIKEY GmbH.', '0018A9': 'Ethernet Direct Corporation', '0018A8': 'AnNeal Technology Inc.', '0018C2': 'Firetide, Inc', '00183E': 'Digilent, Inc', '001842': 'Nokia Danmark A/S', '001840': '3 Phoenix, Inc.', '001844': 'Heads Up Technologies, Inc.', '001821': 'SINDORICOH', '001823': 'Delta Electronics, Inc.', '001815': 'GZ Technologies, Inc.', '001857': 'Unilever R&D', '001853': 'Atera Networks LTD.', '001859': 'Strawberry Linux Co.,Ltd.', '00184F': '8 Ways Technology Corp.', '001806': 'Hokkei Industries Co., Ltd.', '001818': 'Cisco Systems, Inc', '00181A': 'AVerMedia Information Inc.', '001816': 'Ubixon Co., Ltd.', '00182B': 'Softier', '001829': 'Gatsometer', '001838': 'PanAccess Communications,Inc.', '0017F0': 'SZCOM Broadband Network Technology Co.,Ltd', '0017F1': 'Renu Electronics Pvt Ltd', '0017FF': 'PLAYLINE Co.,Ltd.', '0017F6': 'Pyramid Meriden Inc.', '0017C5': 'SonicWALL', '0017BE': 'Tratec Telecom B.V.', '0017C0': 'PureTech Systems, Inc.', '0017BA': 'SEDO CO., LTD.', '0017D4': 'Monsoon Multimedia, Inc', '001780': 'Applied Biosystems B.V.', '0017A5': 'Ralink Technology Corp', '0017A8': 'EDM Corporation', '0017A9': 'Sentivision', '0017AF': 'Enermet', '0017AA': 'elab-experience inc.', '0017AE': 'GAI-Tronics', '0017B5': 'Peerless Systems Corporation', '00176C': 'Pivot3, Inc.', '001770': 'Arti Industrial Electronics Ltd.', '00178B': 'Teledyne Technologies Incorporated', '0017DF': 'Cisco Systems, Inc', '001746': 'Freedom9 Inc.', '00174D': 'DYNAMIC NETWORK FACTORY, INC.', '001744': 'Araneo Ltd.', '001749': 'HYUNDAE YONG-O-SA CO.,LTD', '001743': 'Deck Srl', '001728': 'Selex Communications', '001722': 'Hanazeder Electronic GmbH', '00173D': 'Neology', '001740': 'Bluberi Gaming Technologies Inc', '001732': 'Science-Technical Center RISSA', '00176D': 'CORE CORPORATION', '001773': 'Laketune Technologies Co. 
Ltd', '00173A': 'Cloudastructure Inc', '00172F': 'NeuLion Incorporated', '001723': 'Summit Data Communications', '00171F': 'IMV Corporation', '001753': 'nFore Technology Inc.', '001757': 'RIX TECHNOLOGY LIMITED', '0016BB': 'Law-Chain Computer Technology Co Ltd', '0016B4': 'Private', '0016A7': 'AWETA G&P', '001710': 'Casa Systems Inc.', '0016BF': 'PaloDEx Group Oy', '0016B7': 'Seoul Commtech', '0016E1': 'SiliconStor, Inc.', '0016E2': 'American Fibertek, Inc.', '001713': 'Tiger NetCom', '0016CD': 'HIJI HIGH-TECH CO., LTD.', '0016EF': 'Koko Fitness, Inc.', '0016FD': 'Jaty Electronics', '001678': 'SHENZHEN BAOAN GAOKE ELECTRONICS CO., LTD', '001674': 'EuroCB (Phils.), Inc.', '001670': 'SKNET Corporation', '001689': 'Pilkor Electronics Co., Ltd', '00168A': 'id-Confirm Inc', '001686': 'Karl Storz Imaging', '00168D': 'KORWIN CO., Ltd.', '00168E': 'Vimicro corporation', '00164F': 'World Ethnic Broadcastin Inc.', '00167E': 'DIBOSS.CO.,LTD', '00167B': 'Haver&Boecker', '001679': 'eOn Communications', '0016A3': 'Ingeteam Transmission&Distribution, S.A.', '0016A0': 'Auto-Maskin', '00165D': 'AirDefense, Inc.', '00165B': 'Grip Audio', '001667': 'A-TEC Subsystem INC.', '00164A': 'Vibration Technology Limited', '001645': 'Power Distribution, Inc.', '00163F': 'CReTE SYSTEMS Inc.', '00163D': 'Tsinghua Tongfang Legend Silicon Tech. Co., Ltd.', '00162D': 'STNet Co., Ltd.', '001627': 'embedded-logic DESIGN AND MORE GmbH', '00163A': 'YVES TECHNOLOGY CO., LTD.', '001638': 'TECOM Co., Ltd.', '001633': 'Oxford Diagnostics Ltd.', '0015EC': 'Boca Devices LLC', '0015EF': 'NEC TOKIN Corporation', '0015E4': 'Zimmer Elektromedizin', '001622': 'BBH SYSTEMS GMBH', '001613': 'LibreStream Technologies Inc.', '00160F': 'BADGER METER INC', '001604': 'Sigpro', '0015FB': 'setex schermuly textile computer gmbh', '0015FE': 'SCHILLING ROBOTICS LLC', '00160D': 'Be Here Corporation', '0015C9': 'Gumstix, Inc', '0015BA': 'iba AG', '0015BB': 'SMA Solar Technology AG', '0015BF': 'technicob', '001582': 'Pulse Eight Limited', '00157B': 'Leuze electronic GmbH + Co. KG', '001578': 'Audio / Video Innovations', '0015AE': 'kyung il', '0015BC': 'Develco', '0015BD': 'Group 4 Technology Ltd', '0015B5': 'CI Network Corp.', '0015CD': 'Exartech International Corp.', '0015D9': 'PKC Electronics Oy', '0015DD': 'IP Control Systems Ltd.', '00159C': 'B-KYUNG SYSTEM Co.,Ltd.', '001595': 'Quester Tangent Corporation', '001587': 'Takenaka Seisakusho Co.,Ltd', '00151A': 'Hunter Engineering Company', '001514': 'Hu Zhou NAVA Networks&Electronics Ltd.', '001516': 'URIEL SYSTEMS INC.', '001545': 'SEECODE Co., Ltd.', '001543': 'Aberdeen Test Center', '001541': 'StrataLight Communications, Inc.', '001569': 'PECO II, Inc.', '001564': 'BEHRINGER Spezielle Studiotechnik GmbH', '001563': 'Cisco Systems, Inc', '001561': 'JJPlus Corporation', '001571': 'Nolan Systems', '001565': 'XIAMEN YEALINK NETWORK TECHNOLOGY CO.,LTD', '001538': 'RFID, Inc.', '00152E': 'PacketHop, Inc.', '001558': 'FOXCONN', '00151F': 'Multivision Intelligent Surveillance (Hong Kong) Ltd', '001522': 'Dea Security', '00154F': 'one RF Technology', '001501': 'LexBox', '0014A3': 'Vitelec BV', '001497': 'ZHIYUAN Eletronics co.,ltd.', '001499': 'Helicomm Inc', '001492': 'Liteon, Mobile Media Solution SBU', '001494': 'ESU AG', '0014EF': 'TZero Technologies, Inc.', '0014BC': 'SYNECTIC TELECOM EXPORTS PVT. 
LTD.', '0014B9': 'MSTAR SEMICONDUCTOR', '0014BD': 'incNETWORKS, Inc', '0014B7': 'AR Infotek Inc.', '0014CA': 'Key Radio Systems Limited', '0014C8': 'Contemporary Research Corp', '0014C5': 'Alive Technologies Pty Ltd', '0014DA': 'Huntleigh Healthcare', '0014D5': 'Datang Telecom Technology CO. , LCD,Optical Communication Br', '0014D3': 'SEPSA', '0014D8': 'bio-logic SA', '0014EE': 'Western Digital Technologies, Inc.', '0014B5': 'PHYSIOMETRIX,INC', '00149C': 'HF Company', '00145C': 'Intronics B.V.', '00145A': 'Neratec Solutions AG', '00145B': 'SeekerNet Inc.', '001449': 'Sichuan Changhong Electric Ltd.', '001446': 'SuperVision Solutions LLC', '001440': 'ATOMIC Corporation', '00142E': '77 Elektronika Kft.', '001430': 'ViPowER, Inc', '001432': 'Tarallax Wireless, Inc.', '001485': 'Giga-Byte', '001483': 'eXS Inc.', '001471': 'Eastern Asia Technology Limited', '00146A': 'Cisco Systems, Inc', '00144C': 'General Meters Corp.', '001443': 'Consultronics Europe Ltd', '001460': 'Kyocera Wireless Corp.', '0013F5': 'Akimbi Systems', '0013F1': 'AMOD Technology Co., Ltd.', '0013E7': 'Halcro', '0013EA': 'Kamstrup A/S', '0013E1': 'Iprobe AB', '0013E3': 'CoVi Technologies, Inc.', '0013E4': 'YANGJAE SYSTEMS CORP.', '0013C1': 'Asoka USA Corporation', '0013C0': 'Trix Tecnologia Ltda.', '00141F': 'SunKwang Electronics Co., Ltd', '00141D': 'LTI-Motion GmbH', '001412': 'S-TEC electronics AG', '001411': 'Deutschmann Automation GmbH & Co. KG', '001405': 'OpenIB, Inc.', '004501': 'Midmark RTLS', '001403': 'Renasis, LLC', '001401': 'Rivertree Networks Corp.', '0013D9': 'Matrix Product Development, Inc.', '0013CC': 'Tall Maple Systems', '001394': 'Infohand Co.,Ltd', '001389': 'Redes de Telefonía Móvil S.A.', '00138C': 'Kumyoung.Co.Ltd', '0013A1': 'Crow Electronic Engeneering', '00139C': 'Exavera Technologies, Inc.', '001355': 'TOMEN Cyber-business Solutions, Inc.', '001357': 'Soyal Technology Co., Ltd.', '001360': 'Cisco Systems, Inc', '00138E': 'FOAB Elektronik AB', '001376': 'Tabor Electronics Ltd.', '00135D': 'NTTPC Communications, Inc.', '001352': 'Naztec, Inc.', '00136C': 'TomTom', '001364': 'Paradigm Technology Inc..', '00136B': 'E-TEC', '001380': 'Cisco Systems, Inc', '001385': 'Add-On Technology Co., LTD.', '0013B3': 'Ecom Communications Technology Co., Ltd.', '0013B7': 'Scantech ID', '001308': 'Nuvera Fuel Cells', '001307': 'Paravirtual Corporation', '0012FC': 'PLANET System Co.,LTD', '0012FE': 'Lenovo Mobile Communication Technology Ltd.', '0012EC': 'Movacolor b.v.', '0012EB': 'PDH Solutions, LLC', '001343': 'Matsushita Electronic Components (Europe) GmbH', '00133E': 'MetaSwitch', '001345': 'Eaton Corporation', '001347': 'Red Lion Controls, LP', '00132E': 'ITian Coporation', '001339': 'CCV Deutschland GmbH', '001329': 'VSST Co., LTD', '00132B': 'Phoenix Digital', '00130B': 'Mextal B.V.', '00130D': 'GALILEO AVIONICA', '0012F7': 'Xiamen Xinglian Electronics Co., Ltd.', '00131B': 'BeCell Innovations Corp.', '0012E2': 'ALAXALA Networks Corporation', '0012DF': 'Novomatic AG', '0012D4': 'Princeton Technology, Ltd', '0012D9': 'Cisco Systems, Inc', '0012C2': 'Apex Electronics Factory', '0012BE': 'Astek Corporation', '0012E1': 'Alliant Networks, Inc', '0012C5': 'V-Show Technology (China) Co.,Ltd', '0012AD': 'IDS GmbH', '0012A0': 'NeoMeridian Sdn Bhd', '001286': 'ENDEVCO CORP', '001263': 'Data Voice Technologies GmbH', '001270': 'NGES Denro Systems', '00126E': 'Seidel Elektronik GmbH Nfg.KG', '00126F': 'Rayson Technology Co., Ltd.', '00125C': 'Green Hills Software, Inc.', '00125F': 'AWIND Inc.', '00122D': 'SiNett 
Corporation', '001274': 'NIT lab', '001253': 'AudioDev AB', '00124C': 'BBWM Corporation', '001255': 'NetEffect Incorporated', '001244': 'Cisco Systems, Inc', '00122C': 'Soenen Controls N.V.', '00122B': 'Virbiage Pty Ltd', '001232': 'LeWiz Communications Inc.', '001227': 'Franklin Electric Co., Inc.', '00121A': 'Techno Soft Systemnics Inc.', '00121B': 'Sound Devices, LLC', '0011DF': 'Current Energy', '0011D7': 'eWerks Inc', '001204': 'u10 Networks, Inc.', '00120A': 'Emerson Climate Technologies GmbH', '001208': 'Gantner Instruments GmbH', '001201': 'Cisco Systems, Inc', '0011EE': 'Estari, Inc.', '001200': 'Cisco Systems, Inc', '0011E5': 'KCodes Corporation', '0011DE': 'EURILOGIC', '0011C8': 'Powercom Co., Ltd.', '0011B8': 'Liebherr - Elektronik GmbH', '0011B4': 'Westermo Network Technologies AB', '001178': 'Chiron Technology Ltd', '00116A': 'Domo Ltd', '001193': 'Cisco Systems, Inc', '00118C': 'Missouri Department of Transportation', '00118E': 'Halytech Mace', '001186': 'Prime Systems, Inc.', '001183': 'Datalogic ADC, Inc.', '00117D': 'ZMD America, Inc.', '00117F': 'Neotune Information Technology Corporation,.LTD', '00119D': 'Diginfo Technology Corporation', '00119C': 'EP&T Energy', '00119A': 'Alkeria srl', '0011C1': '4P MOBILE DATA PROCESSING', '0011B2': '2001 Technology Inc.', '0011AF': 'Medialink-i,Inc', '00115E': 'ProMinent Dosiertechnik GmbH', '00117C': 'e-zy.net', '001139': 'STOEBER ANTRIEBSTECHNIK GmbH + Co. KG.', '00113E': 'JL Corporation', '001138': 'TAISHIN CO., LTD.', '001136': 'Goodrich Sensor Systems', '001132': 'Synology Incorporated', '00114B': 'Francotyp-Postalia GmbH', '001147': 'Secom-Industry co.LTD.', '00114A': 'KAYABA INDUSTRY Co,.Ltd.', '001142': 'e-SMARTCOM INC.', '001120': 'Cisco Systems, Inc', '001118': 'BLX IC Design Corp., Ltd.', '001107': 'RGB Networks Inc.', '001108': 'Orbital Data Corporation', '00110C': 'Atmark Techno, Inc.', '00114D': 'Atsumi Electric Co.,LTD.', '001130': 'Allied Telesis (Hong Kong) Ltd.', '001129': 'Paradise Datacom Ltd.', '001157': 'Oki Electric Industry Co., Ltd.', '000FD9': 'FlexDSL Telecommunications AG', '000FD0': 'ASTRI', '000FCA': 'A-JIN TECHLINE CO, LTD', '000FEC': 'ARKUS Inc.', '000FED': 'Anam Electronics Co., Ltd', '000FE5': 'MERCURY SECURITY CORPORATION', '000FAE': 'E2O Communications', '000FB1': 'Cognio Inc.', '000FA7': 'Raptor Networks Technology', '000FF0': 'Sunray Co. 
Ltd.', '001103': 'kawamura electric inc.', '000FDD': 'SORDIN AB', '000FC3': 'PalmPalm Technology, Inc.', '000F88': 'AMETEK, Inc.', '000F7E': 'Ablerex Electronics Co., LTD', '000F83': 'Brainium Technologies Inc.', '000F84': 'Astute Networks, Inc.', '000F5C': 'Day One Digital Media Limited', '000F52': 'YORK Refrigeration, Marine & Controls', '000F4C': 'Elextech INC', '000F93': 'Landis+Gyr Ltd.', '000F94': 'Genexis BV', '000F6C': 'ADDI-DATA GmbH', '000F8F': 'Cisco Systems, Inc', '000F7D': 'Xirrus', '000F76': 'Digital Keystone, Inc.', '000F9E': 'Murrelektronik GmbH', '000F79': 'Bluetooth Interest Group Inc.', '000F17': 'Insta Elektro GmbH', '000F1E': 'Chengdu KT Electric Co.of High & New Technology', '000F15': 'Icotera A/S', '000F39': 'IRIS SENSORS', '000F3E': 'CardioNet, Inc', '000F3F': 'Big Bear Networks', '000F35': 'Cisco Systems, Inc', '000F36': 'Accurate Techhnologies, Inc.', '000F28': 'Itronix Corporation', '000F23': 'Cisco Systems, Inc', '000F22': 'Helius, Inc.', '000F24': 'Cisco Systems, Inc', '000EFC': 'JTAG Technologies B.V.', '000EFE': 'EndRun Technologies LLC', '000F2C': 'Uplogix, Inc.', '000F2B': 'GREENBELL SYSTEMS', '000EF3': 'Smarthome', '000EF2': 'Infinico Corporation', '000F0D': 'Hunt Electronic Co., Ltd.', '000F08': 'Indagon Oy', '000F04': 'cim-usa inc', '000F42': 'Xalyo Systems', '000EA5': 'BLIP Systems', '000EA0': 'NetKlass Technology Inc.', '000EE4': 'BOE TECHNOLOGY GROUP CO.,LTD', '000EDE': 'REMEC, Inc.', '000E9C': 'Benchmark Electronics ', '000E9A': 'BOE TECHNOLOGY GROUP CO.,LTD', '000E90': 'PONICO CORP.', '000E8A': 'Avara Technologies Pty. Ltd.', '000EB8': 'Iiga co.,Ltd', '000EBE': 'B&B Electronics Manufacturing Co.', '000EBB': 'Everbee Networks', '000ECE': 'S.I.T.T.I. S.p.A.', '000ED4': 'CRESITT INDUSTRIE', '000ED6': 'Cisco Systems, Inc', '000ED8': 'Positron Access Solutions Corp', '000ED1': 'Osaka Micro Computer.', '000EB0': 'Solutions Radio BV', '000E83': 'Cisco Systems, Inc', '000E81': 'Devicescape Software, Inc.', '000E88': 'VIDEOTRON CORP.', '000E86': 'Alcatel North America', '000E69': 'China Electric Power Research Institute', '000E61': 'MICROTROL LIMITED', '000E64': 'Elphel, Inc', '000E49': 'Forsway Scandinavia AB', '000E42': 'Motic Incoporation Ltd.', '000E3D': 'Televic N.V.', '000E39': 'Cisco Systems, Inc', '000E34': 'NexGen City, LP', '000E6A': '3Com Ltd', '000E5A': 'TELEFIELD inc.', '000E5D': 'Triple Play Technologies A/S', '000E52': 'Optium Corporation', '000E7E': 'ionSign Oy', '000E77': 'Decru, Inc.', '000E4D': 'Numesa Inc.', '000E4C': 'Bermai Inc.', '000E2D': 'Hyundai Digital Technology Co.,Ltd.', '000E30': 'AERAS Networks, Inc.', '000E29': 'Shester Communications Inc', '000DE1': 'Control Products, Inc.', '000DD0': 'TetraTec Instruments GmbH', '000DD3': 'SAMWOO Telecommunication Co.,Ltd.', '000DD8': 'BBN', '000DB3': 'SDO Communication Corperation', '000DAE': 'SAMSUNG HEAVY INDUSTRIES CO., LTD.', '000DB2': 'Ammasso, Inc.', '000E0D': 'Hesch Schröder GmbH', '000DF6': 'Technology Thesaurus Corp.', '000DFF': 'CHENMING MOLD INDUSTRY CORP.', '000DC7': 'COSMIC ENGINEERING INC.', '000DC2': 'Private', '000DBF': 'TekTone Sound & Signal Mfg., Inc.', '000E19': 'LogicaCMG Pty Ltd', '000E1A': 'JPS Communications', '000E06': 'Team Simoco Ltd', '000D5C': 'Robert Bosch GmbH, VT-ATMO', '000D60': 'IBM Corp', '000D67': 'Ericsson', '000D5F': 'Minds Inc', '000D9C': 'Elan GmbH & Co KG', '000D98': 'S.W.A.C. Schmitt-Walter Automation Consult GmbH', '000D8C': 'Shanghai Wedone Digital Ltd. 
CO.', '000D94': 'AFAR Communications,Inc', '000DAA': 'S.A.Tehnology co.,Ltd.', '000DA6': 'Universal Switching Corporation', '000D8D': 'Prosoft Technology, Inc', '000D84': 'Makus Inc.', '000D73': 'Technical Support, Inc.', '000D69': 'TMT&D Corporation', '000DA2': 'Infrant Technologies, Inc.', '000D68': 'Vinci Systems, Inc.', '000D64': 'COMAG Handels AG', '000D74': 'Sand Network Systems, Inc.', '000D7D': 'Afco Systems', '000D54': '3Com Ltd', '000D4C': 'Outline Electronics Ltd.', '000D0F': 'Finlux Ltd', '000D12': 'AXELL Corporation', '000D34': 'Shell International Exploration and Production, Inc.', '000D32': 'DispenseSource, Inc.', '000CF6': 'Sitecom Europe BV', '000CF2': 'GAMESA Eólica', '000D2E': 'Matsushita Avionics Systems Corporation', '000D28': 'Cisco Systems, Inc', '000D4D': 'Ninelanes', '000D53': 'Beijing 5w Communication Corp.', '000CFC': 'S2io Technologies Corp', '000D0A': 'Projectiondesign as', '000D38': 'NISSIN INC.', '000D15': 'Voipac s.r.o.', '000CA1': 'SIGMACOM Co., LTD.', '000CA6': 'Mintera Corporation', '000CC1': 'Eaton Corporation', '000C8B': 'Connect Tech Inc', '000C90': 'Octasic Inc.', '000C8C': 'KODICOM CO.,LTD.', '000CC2': 'ControlNet (India) Private Limited', '000C94': 'United Electronic Industries, Inc. (EUI)', '000C9B': 'EE Solutions, Inc', '000CE1': 'The Open Group', '000CDC': 'BECS Technology, Inc', '000CC5': 'Nextlink Co., Ltd.', '000CDE': 'ABB STOTZ-KONTAKT GmbH', '000CBF': 'Holy Stone Ent. Co., Ltd.', '000C4D': 'Curtiss-Wright Controls Avionics & Electronics', '000C44': 'Automated Interfaces, Inc.', '000C3B': 'Orion Electric Co., Ltd.', '000C71': 'Wybron, Inc', '000C72': 'Tempearl Industrial Co., Ltd.', '000C78': 'In-Tech Electronics Limited', '000C59': 'Indyme Electronics, Inc.', '000C5C': 'GTN Systems B.V.', '000C55': 'Microlink Communications Inc.', '000C52': 'Roll Systems Inc.', '000C74': 'RIVERTEC CORPORATION', '000C67': 'OYO ELECTRIC CO.,LTD', '000C2E': 'Openet information technology(shenzhen) Co., Ltd.', '000C2C': 'Enwiser Inc.', '000C28': 'RIFATRON', '000C3D': 'Glsystech Co., Ltd.', '000C2F': 'SeorimTechnology Co.,Ltd.', '000C31': 'Cisco Systems, Inc', '000C53': 'Private', '000C48': 'QoStek Corporation', '000C63': 'Zenith Electronics Corporation', '000BBB': 'Etin Systems Co., Ltd', '000BBC': 'En Garde Systems, Inc.', '000BB1': 'Super Star Technology Co., Ltd.', '000BBF': 'Cisco Systems, Inc', '000BAD': 'PC-PoS Inc.', '000BA0': 'T&L Information Inc.', '000C17': 'AJA Video Systems Inc', '000C11': 'NIPPON DEMPA CO.,LTD.', '000C14': 'Diagnostic Instruments, Inc.', '000BB5': 'nStor Technologies, Inc.', '000BB9': 'Imsys AB', '000C21': 'Faculty of Science and Technology, Keio University', '000C1B': 'ORACOM Co, Ltd.', '000BC9': 'Electroline Equipment', '000BC2': 'Corinex Communication Corp.', '000BF7': 'NIDEK CO.,LTD', '000C00': 'BEB Industrie-Elektronik AG', '000BF3': 'BAE SYSTEMS', '000BED': 'ELM Inc.', '000B91': 'Aglaia Gesellschaft für Bildverarbeitung und Kommunikation mbH', '000B97': 'Matsushita Electric Industrial Co.,Ltd.', '000B92': 'Ascom Danmark A/S', '000B9A': 'Shanghai Ulink Telecom Equipment Co. Ltd.', '000B9D': 'TwinMOS Technologies Inc.', '000B96': 'Innotrac Diagnostics Oy', '000B56': 'Cybernetics', '000B50': 'Oxygnet', '000B52': 'JOYMAX ELECTRONICS CO. LTD.', '000B69': 'Franke Finland Oy', '000BA5': 'Quasar Cipta Mandiri, PT', '000B49': 'RF-Link System Inc.', '000B46': 'Cisco Systems, Inc', '000B7A': 'L-3 Linkabit', '000B84': 'BODET', '000B77': 'Cogent Systems, Inc.', '000B66': 'Teralink Communications', '000B2C': 'Eiki Industrial Co. 
Ltd.', '000B26': 'Wetek Corporation', '000B28': 'Quatech Inc.', '000B2A': 'HOWTEL Co., Ltd.', '000B30': 'Beijing Gongye Science & Technology Co.,Ltd', '000AF2': 'NeoAxiom Corp.', '000AF5': 'Airgo Networks, Inc.', '000AF0': 'SHIN-OH ELECTRONICS CO., LTD. R&D', '000AF4': 'Cisco Systems, Inc', '000AEE': 'GCD Hard- & Software GmbH', '000AD8': 'IPCserv Technology Corp.', '000AD7': 'Origin ELECTRIC CO.,LTD.', '000B38': 'Knürr GmbH', '000B32': 'VORMETRIC, INC.', '000B07': 'Voxpath Networks', '000B04': 'Volktek Corporation', '000AF9': 'HiConnect, Inc.', '000B1F': 'I CON Computer Co.', '000B13': 'ZETRON INC', '000B10': '11wave Technonlogy Co.,Ltd', '000AE9': 'AirVast Technology Inc.', '000B2B': 'HOSTNET CORPORATION', '000B02': 'Dallmeier electronic', '000ACD': 'Sunrich Technology Limited', '000ACC': 'Winnow Networks, Inc.', '000ACF': 'PROVIDEO Multimedia Co. Ltd.', '000AD1': 'MWS', '000A7D': 'Valo, Inc.', '000A7F': 'Teradon Industries, Inc', '000A81': 'TEIMA Audiotex S.L.', '000A87': 'Integrated Micromachines Inc.', '000A77': 'Bluewire Technologies LLC', '000A8C': 'Guardware Systems Ltd.', '000A96': 'MEWTEL TECHNOLOGY INC.', '000A82': 'TATSUTA SYSTEM ELECTRONICS CO.,LTD.', '000AD3': 'INITECH Co., Ltd', '000AC8': 'ZPSYS CO.,LTD. (Planning&Management)', '000AC6': 'Overture Networks.', '000AAB': 'Toyota Technical Development Corporation', '000AB4': 'ETIC Telecommunications', '000A7A': 'Kyoritsu Electric Co., Ltd.', '000A9C': 'Server Technology, Inc.', '000A75': 'Caterpillar, Inc', '000A12': 'Azylex Technology, Inc', '000A13': 'Honeywell Video Systems', '000A09': 'TaraCom Integrated Products, Inc.', '000A41': 'Cisco Systems, Inc', '000A36': 'Synelec Telecom Multimedia', '000A48': 'Albatron Technology', '000A3E': 'EADS Telecom', '000A59': 'HW server', '000A54': 'Laguna Hills, Inc.', '000A4F': 'Brain Boxes Limited', '000A52': 'AsiaRF Ltd.', '000A02': 'ANNSO CO., LTD.', '000A65': 'GentechMedia.co.,ltd.', '000A22': 'Amperion Inc', '000A1C': 'Bridge Information Co., Ltd.', '000A32': 'Xsido Corporation', '000A2B': 'Etherstuff', '000A42': 'Cisco Systems, Inc', '000A38': 'Apani Networks', '0009A8': 'Eastmode Pte Ltd', '0009AA': 'Data Comm for Business, Inc.', '0009A4': 'HARTEC Corporation', '0009A6': 'Ignis Optics, Inc.', '0009A7': 'Bang & Olufsen A/S', '0009C8': 'SINAGAWA TSUSHIN KEISOU SERVICE', '0009B7': 'Cisco Systems, Inc', '0009B2': 'L&F Inc.', '0009F3': 'WELL Communication Corp.', '0009EF': 'Vocera Communications', '0009C4': 'Medicore Co., Ltd', '0009D9': 'Neoscale Systems, Inc', '0009D0': 'Solacom Technologies Inc.', '0009CC': 'Moog GmbH', '000A00': 'Mediatek Corp.', '0009F0': 'Shimizu Technology Inc.', '0009E4': 'K Tech Infosystem Inc.', '000972': 'Securebase,Inc', '000978': 'AIJI System Co., Ltd.', '000973': 'Lenten Technology Co., Ltd.', '000975': 'fSONA Communications Corporation', '000977': 'Brunner Elektronik AG', '000969': 'Meret Optical Communications', '000942': 'Wireless Technologies, Inc', '000945': 'Palmmicro Communications Inc', '00093E': 'C&I Technologies', '000940': 'AGFEO GmbH & Co. 
KG', '00097F': 'Vsecure 2000 LTD.', '000980': 'Power Zenith Inc.', '0009A0': 'Microtechno Corporation', '00099B': 'Western Telematic Inc.', '000990': 'ACKSYS Communications & systems', '000953': 'Linkage System Integration Co.Ltd.', '00094C': 'Communication Weaver Co.,Ltd.', '00096E': 'GIANT ELECTRONICS LTD.', '00096C': 'Imedia Semiconductor Corp.', '00095F': 'Telebyte, Inc.', '00098A': 'EqualLogic Inc', '0008F9': 'Artesyn Embedded Technologies', '0008F4': 'Bluetake Technology Co., Ltd.', '0008F7': 'Hitachi Ltd, Semiconductor & Integrated Circuits Gr', '000920': 'EpoX COMPUTER CO.,LTD.', '000922': 'TST Biometrics GmbH', '000916': 'Listman Home Technologies, Inc.', '000911': 'Cisco Systems, Inc', '000912': 'Cisco Systems, Inc', '000908': 'VTech Technology Corp.', '00090B': 'MTL Instruments PLC', '00093B': 'HYUNDAI NETWORKS INC.', '000934': 'Dream-Multimedia-Tv GmbH', '000931': 'Future Internet, Inc.', '0008F5': 'YESTECHNOLOGY Co.,Ltd.', '0008EC': 'Optical Zonu Corporation', '0008E6': 'Littlefeet', '000930': 'AeroConcierge Inc.', '00091E': 'Firstech Technology Corp.', '0008E2': 'Cisco Systems, Inc', '000905': 'iTEC Technologies Ltd.', '0008A5': 'Peninsula Systems Inc.', '0008A2': 'ADI Engineering, Inc.', '000898': 'Gigabit Optics Corporation', '00089B': 'ICP Electronics Inc.', '00089C': 'Elecs Industry Co., Ltd.', '00089D': 'UHD-Elektronik', '0008CF': 'Nippon Koei Power Systems Co., Ltd.', '0008CB': 'Zeta Broadband Inc.', '0008D3': 'Hercules Technologies S.A.S.', '0008D0': 'Musashi Engineering Co., LTD.', '0008BA': 'Erskine Systems Ltd', '0008B5': 'TAI GUEN ENTERPRISE CO., LTD', '0008B7': 'HIT Incorporated', '0008A9': 'SangSang Technology, Inc.', '0008C8': 'Soneticom, Inc.', '0008C4': 'Hikari Co.,Ltd.', '00088F': 'ADVANCED DIGITAL TECHNOLOGY', '00088B': 'Tropic Networks Inc.', '00086B': 'MIPSYS', '00087F': 'SPAUN electronic GmbH & Co. KG', '000886': 'Hansung Teliann, Inc.', '000858': 'Novatechnology Inc.', '000850': 'Arizona Instrument Corp.', '00085B': 'Hanbit Electronics Co., Ltd.', '00082B': 'Wooksung Electronics, Inc.', '00082E': 'Multitone Electronics PLC', '0007ED': 'Altera Corporation', '0007F1': 'TeraBurst Networks Inc.', '0007F2': 'IOA Corporation', '0007EA': 'Massana, Inc.', '0007F0': 'LogiSync LLC', '0007E7': 'FreeWave Technologies', '0007D6': 'Commil Ltd.', '0007D7': 'Caporis Networks AG', '0007D4': 'Zhejiang Yutong Network Communication Co Ltd.', '0007CE': 'Cabletime Limited', '0007D3': 'SPGPrints B.V.', '000813': 'Diskbank, Inc.', '00080F': 'Proximion Fiber Optics AB', '000812': 'GM-2 Corporation', '00080B': 'Birka BPA Informationssystem AB', '00080A': 'Espera-Werke GmbH', '0007E3': 'Navcom Technology, Inc.', '0007E1': 'WIS Communications Co. 
Ltd.', '0007E0': 'Palm Inc.', '0007FA': 'ITT Co., Ltd.', '0007F6': 'Qqest Software Systems', '0007FB': 'Giga Stream UMTS Technologies GmbH', '00085D': 'Mitel Corporation', '000855': 'NASA-Goddard Space Flight Center', '00085A': 'IntiGate Inc.', '000817': 'EmergeCore Networks LLC', '000815': 'CATS Co., Ltd.', '000818': 'Pixelworks, Inc.', '00082A': 'Powerwallz Network Security', '0007AC': 'Eolring', '0007AA': 'Quantum Data Inc.', '0007A4': 'GN Netcom Ltd.', '00079D': 'Musashi Co., Ltd.', '00079F': 'Action Digital Inc.', '00047C': 'Skidata AG', '0007BE': 'DataLogic SpA', '0007AF': 'Red Lion Controls, LP', '000767': 'Yuxing Electronics Company Limited', '00075B': 'Gibson Guitars', '000762': 'Group Sense Limited', '000784': 'Cisco Systems, Inc', '000776': 'Federal APD', '00077A': 'Infoware System Co., Ltd.', '000790': 'Tri-M Technologies (s) Limited', '00078D': 'NetEngines Ltd.', '00078A': 'Mentor Data System Inc.', '000792': 'Sütron Electronic GmbH', '0007B2': 'Transaccess S.A.', '0007AD': 'Pentacon GmbH Foto-und Feinwerktechnik', '0007D0': 'Automat Engenharia de Automação Ltda.', '00076A': 'NEXTEYE Co., Ltd.', '0006E6': 'DongYang Telecom Co., Ltd.', '0006DC': 'Syabas Technology (Amquest)', '00070C': 'SVA-Intrusion.com Co. Ltd.', '00070F': 'Fujant, Inc.', '000714': 'Brightcom', '0006F3': 'AcceLight Networks', '0006D2': 'Tundra Semiconductor Corp.', '0006D8': 'Maple Optical Systems', '0006CF': 'Thales Avionics In-Flight Systems, LLC', '0006FE': 'Ambrado, Inc', '0006E7': 'Bit Blitz Communications Inc.', '0006ED': 'Inara Networks', '0006DB': 'ICHIPS Co., Ltd.', '000727': 'Zi Corporation (HK) Ltd.', '00072E': 'North Node AB', '000699': 'Vida Design Co.', '00068D': 'SEPATON, Inc.', '0006A3': 'Bitran Corporation', '00069F': 'Kuokoa Networks', '0006A2': 'Microtune, Inc.', '0006A6': 'Artistic Licence Engineering Ltd', '0006B8': 'Bandspeed Pty Ltd', '0006BB': 'ATI Technologies Inc.', '0006BD': 'BNTECHNOLOGY Co., Ltd.', '0006C3': 'Schindler Elevator Ltd.', '0006AA': 'VT Miltope', '000657': 'Market Central, Inc.', '000697': 'R & D Center', '00069A': 'e & Tel', '00067D': 'Takasago Ltd.', '000671': 'Softing AG', '000672': 'Netezza', '00066A': 'InfiniCon Systems, Inc.', '000661': 'NIA Home Technologies Corp.', '0006B2': 'Linxtek Co.', '0006B6': 'Nir-Or Israel Ltd.', '000684': 'Biacore AB', '000682': 'Convedia', '0005E3': 'LightSand Communications, Inc.', '0005ED': 'Technikum Joanneum GmbH', '0005EF': 'ADOIR Digital Technology', '000600': 'Toshiba Teli Corporation', '00066B': 'Sysmex Corporation', '00055A': 'Power Dsine Ltd.', '000653': 'Cisco Systems, Inc', '00064F': 'PRO-NETS Technology Corporation', '000612': 'Accusys, Inc.', '000609': 'Crossport Systems', '000616': 'Tel Net Co., Ltd.', '00060D': 'Wave7 Optics', '00064A': 'Honeywell Co., Ltd. 
(KOREA)', '00063F': 'Everex Communications Inc.', '00063D': 'Microwave Data Systems Inc.', '000639': 'Newtec', '00062F': 'Pivotech Systems Inc.', '000636': 'Jedai Broadband Networks', '000651': 'Aspen Networks Inc.', '00065C': 'Malachite Technologies, Inc.', '000642': 'Genetel Systems Inc.', '0005E9': 'Unicess Network, Inc.', '0005E6': 'Egenera, Inc.', '00061E': 'Maxan Systems', '000622': 'Chung Fu Chen Yeh Enterprise Corp.', '00CBBD': 'Cambridge Broadband Networks Ltd.', '000588': 'Sensoria Corp.', '00058D': 'Lynx Photonic Networks, Inc.', '00058A': 'Netcom Co., Ltd.', '000591': 'Active Silicon Ltd', '000593': 'Grammar Engine Inc.', '0005C7': 'I/F-COM A/S', '0005CC': 'Sumtel Communications, Inc.', '0005CF': 'Thunder River Technologies, Inc.', '0005C6': 'Triz Communications', '0005D6': 'L-3 Linkabit', '0005DA': 'Apex Automationstechnik', '0005A1': 'Zenocom', '0005A9': 'Princeton Networks, Inc.', '0005AF': 'InnoScan Computing A/S', '0005A6': 'Extron Electronics', '0005A0': 'MOBILINE Kft.', '000584': 'AbsoluteValue Systems, Inc.', '00058F': 'CLCsoft co.', '0005BB': 'Myspace AB', '000517': 'Shellcomm, Inc.', '00051C': 'Xnet Technology Corp.', '0004F9': 'Xtera Communications, Inc.', '0004FA': 'NBS Technologies Inc.', '000541': 'Advanced Systems Co., Ltd.', '000545': 'Internet Photonics', '00053A': 'Willowglen Services Pte Ltd', '000531': 'Cisco Systems, Inc', '000539': 'A Brand New World in Sweden AB', '00052A': 'Ikegami Tsushinki Co., Ltd.', '00052D': 'Zoltrix International Limited', '000525': 'Puretek Industrial Co., Ltd.', '000558': 'Synchronous, Inc.', '000550': 'Vcomms Connect Limited', '000512': 'Zebra Technologies Inc', '000503': 'ICONAG', '00057A': 'Overture Networks', '000564': 'Tsinghua Bitway Co., Ltd.', '0004F8': 'QUALICABLE TV Industria E Com., Ltda', '0004F5': 'SnowShore Networks, Inc.', '0004F2': 'Polycom', '0004F3': 'FS FORTH-SYSTEME GmbH', '0004ED': 'Billion Electric Co., Ltd.', '0004E7': 'Lightpointe Communications, Inc', '0004E6': 'Banyan Network Private Limited', '0004DE': 'Cisco Systems, Inc', '0004A2': 'L.S.I. Japan Co., Ltd.', '00049B': 'Cisco Systems, Inc', '000499': 'Chino Corporation', '00048E': 'Ohm Tech Labs, Inc.', '0004A6': 'SAF Tehnika Ltd.', '0004A8': 'Broadmax Technologies, Inc.', '0004A1': 'Pathway Connectivity', '0004CE': 'Patria Ailon', '0004C7': 'NetMount', '00048D': ' Teo Technologies, Inc', '000490': 'Optical Access', '0004E1': 'Infinior Microsystems', '0004C2': 'Magnipix, Inc.', '000486': 'ITTC, University of Kansas', '00048B': 'Poscon Corporation', '000484': 'Amann GmbH', '000478': 'G. 
Star Technology Corporation', '00047A': 'AXXESSIT ASA', '00046A': 'Navini Networks', '00046B': 'Palm Wireless, Inc.', '000472': 'Telelynx, Inc.', '000464': 'Pulse-Link Inc', '000465': 'i.s.t isdn-support technik GmbH', '00045E': 'PolyTrax Information Technology AG', '000411': 'Inkra Networks, Inc.', '000410': 'Spinnaker Networks, Inc.', '000412': 'WaveSmith Networks, Inc.', '000442': 'NACT', '000445': 'LMS Skalar Instruments GmbH', '00044E': 'Cisco Systems, Inc', '000452': 'RocketLogix, Inc.', '000427': 'Cisco Systems, Inc', '000420': 'Slim Devices, Inc.', '00043D': 'INDEL AG', '00043B': 'Lava Computer Mfg., Inc.', '000431': 'GlobalStreams, Inc.', '0003C4': 'Tomra Systems ASA', '0003C5': 'Mobotix AG', '0003BF': 'Centerpoint Broadband Technologies, Inc.', '0003B9': 'Hualong Telecom Co., Ltd.', '0003F9': 'Pleiades Communications, Inc.', '0003FE': 'Cisco Systems, Inc', '0003F5': 'Chip2Chip', '000391': 'Advanced Digital Broadcast, Ltd.', '0003AC': 'Fronius Schweissmaschinen', '000399': 'Dongju Informations & Communications Co., Ltd.', '0003FD': 'Cisco Systems, Inc', '0003BA': 'Oracle Corporation', '0003AF': 'Paragea Communications', '00030B': 'Hunter Technology, Inc.', '0003C9': 'TECOM Co., Ltd.', '0003C1': 'Packet Dynamics Ltd', '0003DE': 'OTC Wireless', '000328': 'Mace Group, Inc.', '00032B': 'GAI Datenfunksysteme GmbH', '00032C': 'ABB Switzerland Ltd', '000323': 'Cornet Technology, Inc.', '000380': 'SSH Communications Security Corp.', '000382': 'A-One Co., Ltd.', '00037A': 'Taiyo Yuden Co., Ltd.', '000375': 'NetMedia, Inc.', '00035A': 'Photron Limited', '000353': 'Mitac, Inc.', '000356': 'Wincor Nixdorf International GmbH', '00034F': 'Sur-Gard Security', '00037B': 'IDEC IZUMI Corporation', '000367': 'Jasmine Networks, Inc.', '00036A': 'Mainnet, Ltd.', '00036B': 'Cisco Systems, Inc', '00036C': 'Cisco Systems, Inc', '00038F': 'Weinschel Corporation', '000384': 'AETA', '000387': 'Blaze Network Products', '000381': 'Ingenico International', '000340': 'Floware Wireless Systems, Ltd.', '0001EC': 'Ericsson Group', '000333': 'Digitel Co., Ltd.', '000338': 'Oak Technology', '000339': 'Eurologic Systems, Ltd.', '000331': 'Cisco Systems, Inc', '000330': 'Imagenics, Co., Ltd.', '00034A': 'RIAS Corporation', '0002CF': 'ZyGate Communications, Inc.', '0002D1': 'Vivotek, Inc.', '0002C2': 'Net Vision Telecom', '0002B6': 'Acrosser Technology Co., Ltd.', '0002B1': 'Anritsu, Ltd.', '0002AD': 'HOYA Corporation', '0002AE': 'Scannex Electronics Ltd.', '000304': 'Pacific Broadband Communications', '000301': 'EXFO', '0002FD': 'Cisco Systems, Inc', '000300': 'Barracuda Networks, Inc.', '0002BD': 'Bionet Co., Ltd.', '0002BE': 'Totsu Engineering, Inc.', '0002B9': 'Cisco Systems, Inc', '0002BA': 'Cisco Systems, Inc', '0002F9': 'MIMOS Berhad', '0002F3': 'Media Serve Co., Ltd.', '0002EA': 'Focus Enhancements', '000313': 'Access Media SPA', '000310': 'E-Globaledge Corporation', '00030A': 'Argus Technologies', '00031E': 'Optranet, Inc.', '000315': 'Cidco Incorporated', '000319': 'Infineon AG', '0002E7': 'CAB GmbH & Co KG', '0002DF': 'Net Com Systems, Inc.', '0002DB': 'NETSEC', '00024B': 'Cisco Systems, Inc', '00024D': 'Mannesman Dematic Colby Pty. 
Ltd.', '000250': 'Geyser Networks, Inc.', '000248': 'Pilz GmbH & Co.', '00024C': 'SiByte, Inc.', '00025A': 'Catena Networks', '00026E': 'NeGeN Access, Inc.', '000270': 'Crewave Co., Ltd.', '00023B': 'Ericsson', '000239': 'Visicom', '000233': 'Mantra Communications, Inc.', '0002A2': 'Hilscher GmbH', '000254': 'WorldGate', '00029A': 'Storage Apps', '00028F': 'Globetek, Inc.', '000287': 'Adapcom', '000281': 'Madge Ltd.', '000263': 'UPS Manufacturing SRL', '000240': 'Seedek Co., Ltd.', '0001F3': 'QPS, Inc.', '0001E4': 'Sitera, Inc.', '0001E3': 'Siemens AG', '0001EB': 'C-COM Corporation', '0001F2': 'Mark of the Unicorn, Inc.', '0001D9': 'Sigma, Inc.', '0001C5': 'Simpler Networks', '0001C9': 'Cisco Systems, Inc', '00021B': 'Kollmorgen-Servotronix', '00021E': 'SIMTEL S.R.L.', '000221': 'DSP Application, Ltd.', '0001F7': 'Image Display Systems, Inc.', '000200': 'Net & Sys Co., Ltd.', '0001C4': 'NeoWave, Inc.', '0001C1': 'Vitesse Semiconductor Corporation', '0001D8': 'Teltronics, Inc.', '000205': 'Hitachi Denshi, Ltd.', '000215': 'Cotas Computer Technology A/B', '00022A': 'Asound Electronic', '00018C': 'Mega Vision', '00018F': 'Kenetec, Inc.', '00017B': 'Heidelberger Druckmaschinen AG', '00019C': 'JDS Uniphase Inc.', '0001A3': 'GENESYS LOGIC, INC.', '000182': 'DICA TECHNOLOGIES AG', '000189': 'Refraction Technology, Inc.', '000193': 'Hanbyul Telecom Co., Ltd.', '0030F5': 'Wild Lab. Ltd.', '00015D': 'Oracle Corporation ', '000173': 'AMCC', '00016C': 'FOXCONN', '000175': 'Radiant Communications Corp.', '0001AF': 'Artesyn Embedded Technologies', '00018A': 'ROI COMPUTER AG', '000192': 'Texas Digital Systems', '00015C': 'CADANT INC.', '000169': 'Celestix Networks Pte Ltd.', '00016B': 'LightChip, Inc.', '0001B6': 'SAEJIN T&M Co., Ltd.', '0001AB': 'Main Street Networks', '000145': 'WINSYSTEMS, INC.', '000137': 'IT Farm Corporation', '00013C': 'TIW SYSTEMS', '000133': 'KYOWA Electronic Instruments C', '0001A5': 'Nextcomm, Inc.', '000190': 'SMK-M', '00014C': 'Berkeley Process Control', '000143': 'Cisco Systems, Inc', '00014B': 'Ennovate Networks, Inc.', '00013D': 'RiscStation Ltd.', '000120': 'OSCILLOQUARTZ S.A.', '003046': 'Controlled Electronic Manageme', '003098': 'Global Converging Technologies', '00300D': 'MMC Technology, Inc.', '003075': 'ADTECH', '00B069': 'Honewell Oy', '00B0C2': 'Cisco Systems, Inc', '00B03B': 'HiQ Networks', '000127': 'OPEN Networks Pty Ltd', '00010E': 'Bri-Link Technologies Co., Ltd', '003037': 'Packard Bell Nec Services', '003057': 'QTelNet, Inc.', '0030FC': 'Terawave Communications, Inc.', '00B086': 'LocSoft Limited', '0030A2': 'Lightner Engineering', '003042': 'DeTeWe-Deutsche Telephonwerke', '00B0C7': 'Tellabs Operations, Inc.', '00B02A': 'ORSYS GmbH', '000104': 'DVICO Co., Ltd.', '000106': 'Tews Datentechnik GmbH', '000109': 'Nagano Japan Radio Co., Ltd.', '00029C': '3COM', '00B019': 'UTC CCS', '00306F': 'SEYEON TECH. 
CO., LTD.', '00303D': 'IVA CORPORATION', '0030F4': 'STARDOT TECHNOLOGIES', '003052': 'ELASTIC NETWORKS', '003019': 'Cisco Systems, Inc', '003076': 'Akamba Corporation', '0030EC': 'BORGARDT', '0030F3': 'At Work Computers', '0030CC': 'Tenor Networks, Inc.', '0030B0': 'Convergenet Technologies', '0030EB': 'TURBONET COMMUNICATIONS, INC.', '0030A1': 'WEBGATE Inc.', '00306A': 'PENTA MEDIA CO., LTD.', '003086': 'Transistor Devices, Inc.', '003044': 'CradlePoint, Inc', '0030C2': 'COMONE', '003053': 'Basler AG', '0030D2': 'WIN TECHNOLOGIES, CO., LTD.', '003059': 'KONTRON COMPACT COMPUTERS AG', '003097': 'AB Regin', '00305F': 'Hasselblad', '0030DC': 'RIGHTECH CORPORATION', '003025': 'CHECKOUT COMPUTER SYSTEMS, LTD', '003012': 'DIGITAL ENGINEERING LTD.', '0030C6': 'CONTROL SOLUTIONS, INC.', '0030D6': 'MSC VERTRIEBS GMBH', '003041': 'SAEJIN T & M CO., LTD.', '00308C': 'Quantum Corporation', '0030E3': 'SEDONA NETWORKS CORP.', '0030BF': 'MULTIDATA GMBH', '00D00F': 'SPEECH DESIGN GMBH', '003058': 'API MOTION', '003034': 'SET ENGINEERING', '00304A': 'Fraunhofer IPMS', '00308D': 'Pinnacle Systems, Inc.', '0030A6': 'VIANET TECHNOLOGIES, LTD.', '00D0BF': 'PIVOTAL TECHNOLOGIES', '00303C': 'ONNTO CORP.', '003024': 'Cisco Systems, Inc', '0030F6': 'SECURELOGIX CORPORATION', '00D02F': 'VLSI TECHNOLOGY INC.', '0030D8': 'SITEK', '003016': 'ISHIDA CO., LTD.', '00D0B1': 'OMEGA ELECTRONICS SA', '00D016': 'SCM MICROSYSTEMS, INC.', '00D043': 'ZONAL RETAIL DATA SYSTEMS', '00D0C1': 'HARMONIC DATA SYSTEMS, LTD.', '00D0AC': 'Commscope, Inc', '00D07C': 'KOYO ELECTRONICS INC. CO.,LTD.', '00D0BC': 'Cisco Systems, Inc', '00D0CB': 'DASAN CO., LTD.', '00D019': 'DAINIPPON SCREEN CORPORATE', '00D035': 'BEHAVIOR TECH. COMPUTER CORP.', '00D0DB': 'MCQUAY INTERNATIONAL', '00D070': 'LONG WELL ELECTRONICS CORP.', '00D029': 'WAKEFERN FOOD CORPORATION', '00D0C3': 'VIVID TECHNOLOGY PTE, LTD.', '00D013': 'PRIMEX AEROSPACE COMPANY', '00D0A3': 'VOCAL DATA, INC.', '00D07E': 'KEYCORP LTD.', '00D020': 'AIM SYSTEM, INC.', '00D0C8': 'Prevas A/S', '005017': 'RSR S.R.L.', '005065': 'TDK-Lambda Corporation', '0050B9': 'XITRON TECHNOLOGIES, INC.', '00506B': 'SPX-ATEG', '00D076': 'Bank of America', '00D051': 'O2 MICRO, INC.', '00D0BB': 'Cisco Systems, Inc', '00D06E': 'TRENDVIEW RECORDERS LTD.', '00D05C': 'KATHREIN TechnoTrend GmbH', '00D0EA': 'NEXTONE COMMUNICATIONS, INC.', '00D064': 'MULTITEL', '00D05E': 'STRATABEAM TECHNOLOGY, INC.', '00D0AA': 'CHASE COMMUNICATIONS', '00D05D': 'INTELLIWORXX, INC.', '00D0A1': 'OSKAR VIERLING GMBH + CO. 
KG', '00D006': 'Cisco Systems, Inc', '00D02A': 'Voxent Systems Ltd.', '00D08F': 'ARDENT TECHNOLOGIES, INC.', '00D0FA': 'Thales e-Security Ltd.', '00D0EB': 'LIGHTERA NETWORKS, INC.', '0050A1': 'CARLO GAVAZZI, INC.', '00D0C0': 'Cisco Systems, Inc', '00D068': 'IWILL CORPORATION', '005029': '1394 PRINTER WORKING GROUP', '005081': 'MURATA MACHINERY, LTD.', '0050AC': 'MAPLE COMPUTER CORPORATION', '005049': 'Arbor Networks Inc', '00500D': 'SATORI ELECTORIC CO., LTD.', '0050A3': 'TransMedia Communications, Inc.', '0050A4': 'IO TECH, INC.', '00505C': 'TUNDO CORPORATION', '0050B3': 'VOICEBOARD CORPORATION', '00508C': 'RSI SYSTEMS', '0050E1': 'NS TECH ELECTRONICS SDN BHD', '0050DE': 'SIGNUM SYSTEMS CORP.', '005075': 'KESTREL SOLUTIONS', '0050ED': 'ANDA NETWORKS', '005096': 'SALIX TECHNOLOGIES, INC.', '005012': 'CBL - GMBH', '0050F2': 'MICROSOFT CORP.', '00504A': 'ELTECO A.S.', '0050C1': 'GEMFLEX NETWORKS, LTD.', '0050CF': 'VANLINK COMMUNICATION TECHNOLOGY RESEARCH INSTITUTE', '005024': 'NAVIC SYSTEMS, INC.', '0090BD': 'OMNIA COMMUNICATIONS, INC.', '0090B4': 'WILLOWBROOK TECHNOLOGIES', '009003': 'APLIO', '009031': 'MYSTICOM, LTD.', '00909D': 'NovaTech Process Solutions, LLC', '0090DD': 'MIHARU COMMUNICATIONS Inc', '009028': 'NIPPON SIGNAL CO., LTD.', '00907D': 'Lake Communications', '0090C9': 'DPAC Technologies', '00507B': 'MERLOT COMMUNICATIONS', '0050CD': 'DIGIANSWER A/S', '00502D': 'ACCEL, INC.', '00503A': 'DATONG ELECTRONICS LTD.', '005087': 'TERASAKI ELECTRIC CO., LTD.', '005026': 'COSYSTEMS, INC.', '00902C': 'DATA & CONTROL EQUIPMENT LTD.', '00901D': 'PEC (NZ) LTD.', '009097': 'Sycamore Networks', '009025': 'BAE Systems Australia (Electronic Systems) Pty Ltd', '00904C': 'Epigram, Inc.', '009084': 'ATECH SYSTEM', '00906A': 'TURNSTONE SYSTEMS, INC.', '009087': 'ITIS', '009051': 'ULTIMATE TECHNOLOGY CORP.', '009026': 'ADVANCED SWITCHING COMMUNICATIONS, INC.', '0090D3': 'GIESECKE & DEVRIENT GmbH', '009067': 'WalkAbout Computers, Inc.', '00902A': 'COMMUNICATION DEVICES, INC.', '00900D': 'Overland Storage Inc.', '0090CF': 'NORTEL', '009072': 'SIMRAD AS', '00902F': 'NETCORE SYSTEMS, INC.', '009098': 'SBC DESIGNS, INC.', '009045': 'Marconi Communications', '009036': 'ens, inc.', '00908B': 'Tattile SRL', '009044': 'ASSURED DIGITAL, INC.', '009091': 'DigitalScape, Inc.', '00907E': 'VETRONIX CORP.', '009050': 'Teleste Corporation', '00904D': 'SPEC S.A.', '0090FD': 'CopperCom, Inc.', '009039': 'SHASTA NETWORKS', '0090FC': 'NETWORK COMPUTING DEVICES', '009014': 'ROTORK INSTRUMENTS, LTD.', '00908D': 'VICKERS ELECTRONICS SYSTEMS', '009042': 'ECCS, Inc.', '009033': 'INNOVAPHONE AG', '009002': 'ALLGON AB', '0010D4': 'STORAGE COMPUTER CORPORATION', '000629': 'IBM Corp', '0010A9': 'ADHOC TECHNOLOGIES', '00108A': 'TeraLogic, Inc.', '001024': 'NAGOYA ELECTRIC WORKS CO., LTD', '0010D6': 'Exelis', '001048': 'HTRC AUTOMATION, INC.', '001097': 'WinNet Metropolitan Communications Systems, Inc.', '001085': 'POLARIS COMMUNICATIONS, INC.', '00100C': 'ITO CO., LTD.', '001006': 'Thales Contact Solutions Ltd.', '009009': 'I Controls, Inc.', '00908E': 'Nortel Networks Broadband Access', '00907C': 'DIGITALCAST, INC.', '0090D2': 'ARTEL VIDEO SYSTEMS', '0001FE': 'DIGITAL EQUIPMENT CORPORATION', '0090BE': 'IBC/INTEGRATED BUSINESS COMPUTERS', '00103C': 'IC ENSEMBLE, INC.', '001019': 'SIRONA DENTAL SYSTEMS GmbH & Co. 
KG', '0090DE': 'CARDKEY SYSTEMS, INC.', '00906B': 'APPLIED RESOURCES, INC.', '00107F': 'CRESTRON ELECTRONICS, INC.', '0010E2': 'ArrayComm, Inc.', '0010D2': 'NITTO TSUSHINKI CO., LTD', '0010D9': 'IBM JAPAN, FUJISAWA MT+D', '009066': 'Troika Networks, Inc.', '001094': 'Performance Analysis Broadband, Spirent plc', '001050': 'RION CO., LTD.', '00109C': 'M-SYSTEM CO., LTD.', '0010CE': 'VOLAMP, LTD.', '0010B2': 'COACTIVE AESTHETICS', '00105F': 'ZODIAC DATA SYSTEMS', '00103E': 'NETSCHOOLS CORPORATION', '0010CB': 'FACIT K.K.', '001038': 'MICRO RESEARCH INSTITUTE, INC.', '0010E0': 'Oracle Corporation ', '00107C': 'P-COM, INC.', '0010BD': 'THE TELECOMMUNICATION TECHNOLOGY COMMITTEE (TTC)', '001008': 'VIENNA SYSTEMS CORPORATION', '0010D1': 'Top Layer Networks, Inc.', '00106A': 'DIGITAL MICROWAVE CORPORATION', '00106F': 'TRENTON TECHNOLOGY INC.', '001034': 'GNP Computers', '001044': 'InnoLabs Corporation', '0010A1': 'KENDIN SEMICONDUCTOR, INC.', '0010A8': 'RELIANCE COMPUTER CORP.', '00106E': 'TADIRAN COM. LTD.', '00109A': 'NETLINE', '001089': 'WebSonic', '0010E6': 'APPLIED INTELLIGENT SYSTEMS, INC.', '00103B': 'HIPPI NETWORKING FORUM', '00E0B7': 'PI GROUP, LTD.', '00E083': 'JATO TECHNOLOGIES, INC.', '00E072': 'LYNK', '00E0AD': 'EES TECHNOLOGY, LTD.', '00E094': 'OSAI SRL', '00E032': 'MISYS FINANCIAL SYSTEMS, LTD.', '00E0C0': 'SEIWA ELECTRIC MFG. CO., LTD.', '00E0D1': 'TELSIS LIMITED', '00E0F0': 'ABLER TECHNOLOGY, INC.', '00E002': 'CROSSROADS SYSTEMS, INC.', '00E0D6': 'COMPUTER & COMMUNICATION RESEARCH LAB.', '00E074': 'TIERNAN COMMUNICATIONS, INC.', '00E0D9': 'TAZMO CO., LTD.', '00E055': 'INGENIERIA ELECTRONICA COMERCIAL INELCOM S.A.', '00E0B4': 'TECHNO SCOPE CO., LTD.', '00E071': 'EPIS MICROCOMPUTER', '00E066': 'ProMax Systems, Inc.', '00E093': 'ACKFIN NETWORKS', '00E042': 'Pacom Systems Ltd.', '00E0EB': 'DIGICOM SYSTEMS, INCORPORATED', '00E01C': 'Cradlepoint, Inc', '00E027': 'DUX, INC.', '00E04B': 'JUMP INDUSTRIELLE COMPUTERTECHNIK GmbH', '00E097': 'CARRIER ACCESS CORPORATION', '00E089': 'ION Networks, Inc.', '00E070': 'DH TECHNOLOGY', '00E05C': 'PHC Corporation', '00E024': 'GADZOOX NETWORKS', '00605B': 'IntraServer Technology, Inc.', '0060D7': 'ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE (EPFL)', '00E0BA': 'BERGHOF AUTOMATIONSTECHNIK GmbH', '00E021': 'FREEGATE CORP.', '00E05B': 'WEST END SYSTEMS CORP.', '00E044': 'LSICS CORPORATION', '00E0CA': 'BEST DATA PRODUCTS', '00E0A7': 'IPC INFORMATION SYSTEMS, INC.', '00E062': 'HOST ENGINEERING', '00E0CE': 'ARN', '00E05F': 'e-Net, Inc.', '00E01F': 'AVIDIA Systems, Inc.', '00E0D0': 'NETSPEED, INC.', '00E060': 'SHERWOOD', '00E06A': 'KAPSCH AG', '00E001': 'STRAND LIGHTING LIMITED', '00E0D8': 'LANBit Computer, Inc.', '00E0E7': 'RAYTHEON E-SYSTEMS, INC.', '00E03C': 'AdvanSys', '00E073': 'NATIONAL AMUSEMENT NETWORK, INC.', '006066': 'LACROIX Trafic', '0060F4': 'ADVANCED COMPUTER SOLUTIONS, Inc.', '006060': 'Data Innovations North America', '006035': 'DALLAS SEMICONDUCTOR, INC.', '006007': 'ACRES GAMING, INC.', '006058': 'COPPER MOUNTAIN COMMUNICATIONS, INC.', '0060FB': 'PACKETEER, INC.', '0060C1': 'WaveSpan Corporation', '00603C': 'HAGIWARA SYS-COM CO., LTD.', '00607D': 'SENTIENT NETWORKS INC.', '006019': 'Roche Diagnostics', '006059': 'TECHNICAL COMMUNICATIONS CORP.', '006003': 'TERAOKA WEIGH SYSTEM PTE, LTD.', '00607A': 'DVS GMBH', '0060F3': 'Performance Analysis Broadband, Spirent plc', '00607C': 'WaveAccess, Ltd.', '0060A0': 'SWITCHED NETWORK TECHNOLOGIES, INC.', '006017': 'TOKIMEC INC.', '006026': 'VIKING Modular Solutions', '00606E': 'DAVICOM SEMICONDUCTOR, 
INC.', '0060C7': 'AMATI COMMUNICATIONS CORP.', '1000E8': 'NATIONAL SEMICONDUCTOR', '006073': 'REDCREEK COMMUNICATIONS, INC.', '0060FD': 'NetICs, Inc.', '0060CB': 'HITACHI ZOSEN CORPORATION', '0060C8': 'KUKA WELDING SYSTEMS & ROBOTS', '006023': 'PERICOM SEMICONDUCTOR CORP.', '006063': 'PSION DACOM PLC.', '006031': 'HRK SYSTEMS', '00600E': 'WAVENET INTERNATIONAL, INC.', '0060A3': 'CONTINUUM TECHNOLOGY CORP.', '00603D': '3CX', '0060ED': 'RICARDO TEST AUTOMATION LTD.', '006012': 'POWER COMPUTING CORPORATION', '00604D': 'MMC NETWORKS, INC.', '0060F7': 'DATAFUSION SYSTEMS', '006020': 'PIVOTAL NETWORKING, INC.', '0060C0': 'Nera Networks AS', '006077': 'PRISA NETWORKS', '006094': 'IBM Corp', '0060AB': 'LARSCOM INCORPORATED', '0060DD': 'MYRICOM, INC.', '006046': 'VMETRO, INC.', '006068': 'Dialogic Corporation', '00605A': 'CELCORE, INC.', '006095': 'ACCU-TIME SYSTEMS, INC.', '00608A': 'CITADEL COMPUTER', '006093': 'VARIAN', '00A03F': 'COMPUTER SOCIETY MICROPROCESSOR & MICROPROCESSOR STANDARDS C', '00A02D': '1394 Trade Association', '00A07C': 'TONYANG NYLON CO., LTD.', '00A09A': 'NIHON KOHDEN AMERICA', '00A093': 'B/E AEROSPACE, Inc.', '00A078': 'Marconi Communications', '00A0BF': 'WIRELESS DATA GROUP MOTOROLA', '00A05F': 'BTG Electronics Design BV', '00A0CD': 'DR. JOHANNES HEIDENHAIN GmbH', '00A0DA': 'INTEGRATED SYSTEMS Technology, Inc.', '00A02A': 'TRANCELL SYSTEMS', '00A01C': 'NASCENT NETWORKS CORPORATION', '00A08F': 'DESKNET SYSTEMS, INC.', '00A0CC': 'LITE-ON COMMUNICATIONS, INC.', '00A0E6': 'DIALOGIC CORPORATION', '00A04A': 'NISSHIN ELECTRIC CO., LTD.', '00A035': 'CYLINK CORPORATION', '00A03D': 'OPTO-22', '00A056': 'MICROPROSS', '00A0E1': 'WESTPORT RESEARCH ASSOCIATES, INC.', '00A0B7': 'CORDANT, INC.', '00A026': 'TELDAT, S.A.', '00A023': 'APPLIED CREATIVE TECHNOLOGY, INC.', '00A089': 'XPOINT TECHNOLOGIES, INC.', '00A007': 'APEXX TECHNOLOGY, INC.', '00A047': 'INTEGRATED FITNESS CORP.', '00A032': 'GES SINGAPORE PTE. 
LTD.', '00A0E3': 'XKL SYSTEMS CORP.', '00A014': 'CSIR', '00A015': 'WYLE', '00A06A': 'Verilink Corporation', '00A018': 'CREATIVE CONTROLLERS, INC.', '00A0FE': 'BOSTON TECHNOLOGY, INC.', '00A0EB': 'Encore Networks, Inc.', '00A07D': 'SEEQ TECHNOLOGY, INC.', '00A0D9': 'CONVEX COMPUTER CORPORATION', '00A070': 'COASTCOM', '0020DE': "JAPAN DIGITAL LABORAT'Y CO.LTD", '00200B': 'OCTAGON SYSTEMS CORP.', '002094': 'CUBIX CORPORATION', '0020F7': 'CYBERDATA CORPORATION', '0020D7': 'JAPAN MINICOMPUTER SYSTEMS CO., Ltd.', '0020C3': 'COUNTER SOLUTIONS LTD.', '002047': 'STEINBRECHER CORP.', '0020D5': 'VIPA GMBH', '00201A': 'MRV Communications, Inc.', '0020F2': 'Oracle Corporation ', '0020B8': 'PRIME OPTION, INC.', '0020AD': 'LINQ SYSTEMS', '00207D': 'ADVANCED COMPUTER APPLICATIONS', '00202F': 'ZETA COMMUNICATIONS, LTD.', '00209A': 'THE 3DO COMPANY', '002062': 'SCORPION LOGIC, LTD.', '002081': 'TITAN ELECTRONICS', '0020D9': 'PANASONIC TECHNOLOGIES, INC./MIECO-US', '00206F': 'FLOWPOINT CORPORATION', '002020': 'MEGATRON COMPUTER INDUSTRIES PTY, LTD.', '00201B': 'NORTHERN TELECOM/NETWORK', '0020F3': 'RAYNET CORPORATION', '002090': 'ADVANCED COMPRESSION TECHNOLOGY, INC.', '0020C0': 'PULSE ELECTRONICS, INC.', '00207E': 'FINECOM CO., LTD.', '00204E': 'NETWORK SECURITY SYSTEMS, INC.', '0020CA': 'DIGITAL OCEAN', '002095': 'RIVA ELECTRONICS', '0020FB': 'OCTEL COMMUNICATIONS CORP.', '002070': 'HYNET, LTD.', '0020BE': 'LAN ACCESS CORP.', '00203F': 'JUKI CORPORATION', '0020A9': 'WHITE HORSE INDUSTRIAL', '002096': 'Invensys', '00204A': 'PRONET GMBH', '0020FF': 'SYMMETRICAL TECHNOLOGIES', '002044': 'GENITECH PTY LTD', '0020EF': 'USC CORPORATION', '002030': 'ANALOG & DIGITAL SYSTEMS', '0020AC': 'INTERFLEX DATENSYSTEME GMBH', '0020D8': 'Nortel Networks', '002066': 'GENERAL MAGIC, INC.', '002001': 'DSP SOLUTIONS, INC.', '0020BF': 'AEHR TEST SYSTEMS', '002053': 'HUNTSVILLE MICROSYSTEMS, INC.', '0020A1': 'DOVATRON', '00C02F': 'OKUMA CORPORATION', '00C01E': 'LA FRANCAISE DES JEUX', '00C0E1': 'SONIC SOLUTIONS', '002036': 'BMC SOFTWARE', '0020F8': 'CARRERA COMPUTERS, INC.', '00C065': 'SCOPE COMMUNICATIONS, INC.', '00C079': 'FONSYS CO.,LTD.', '00C00F': 'QUANTUM SOFTWARE SYSTEMS LTD.', '00C087': 'UUNET TECHNOLOGIES, INC.', '00C006': 'NIPPON AVIONICS CO., LTD.', '00C0A4': 'UNIGRAF OY', '00C029': 'Nexans Deutschland GmbH - ANS', '00C0FA': 'CANARY COMMUNICATIONS, INC.', '00C03A': 'MEN-MIKRO ELEKTRONIK GMBH', '00C040': 'ECCI', '00C01C': 'INTERLINK COMMUNICATIONS LTD.', '00C042': 'DATALUX CORP.', '00C071': 'AREANEX COMMUNICATIONS, INC.', '00C044': 'EMCOM CORPORATION', '00C0E6': 'Verilink Corporation', '00C096': 'TAMURA CORPORATION', '00C04E': 'COMTROL CORPORATION', '00C03F': 'STORES AUTOMATED SYSTEMS, INC.', '00C036': 'RAYTECH ELECTRONIC CORP.', '00C0A2': 'INTERMEDIUM A/S', '00C053': 'Aspect Software Inc.', '00C0CC': 'TELESCIENCES CO SYSTEMS, INC.', '00C0CE': 'CEI SYSTEMS & ENGINEERING PTE', '00404F': 'SPACE & NAVAL WARFARE SYSTEMS', '00408F': 'WM-DATA MINFO AB', '0040D7': 'STUDIO GEN INC.', '004057': 'LOCKHEED - SANDERS', '004017': 'Silex Technology America', '00C0D9': 'QUINTE NETWORK CONFIDENTIALITY', '00C0B1': 'GENIUS NET CO.', '00C0D2': 'SYNTELLECT, INC.', '00C07E': 'KUBOTA CORPORATION ELECTRONIC', '00C0DD': 'QLogic Corporation', '00C01B': 'SOCKET COMMUNICATIONS, INC.', '00406F': 'SYNC RESEARCH INC.', '0040F3': 'NETCOR', '00404B': 'MAPLE COMPUTER SYSTEMS', '004033': 'ADDTRON TECHNOLOGY CO., LTD.', '00C08E': 'NETWORK INFORMATION TECHNOLOGY', '00C0C7': 'SPARKTRUM MICROSYSTEMS, INC.', '00C0C4': 'COMPUTER OPERATIONAL', '00C012': 
'NETSPAN CORPORATION', '0040AD': 'SMA REGELSYSTEME GMBH', '00406D': 'LANCO, INC.', '0040CD': 'TERA MICROSYSTEMS, INC.', '0040F5': 'OEM ENGINES', '004039': 'OPTEC DAIICHI DENKO CO., LTD.', '004079': 'JUKO MANUFACTURE COMPANY, LTD.', '00C020': 'ARCO ELECTRONIC, CONTROL LTD.', '00C0E7': 'FIBERDATA AB', '00C05F': 'FINE-PAL COMPANY LIMITED', '0040AE': 'DELTA CONTROLS, INC.', '0040F6': 'KATRON COMPUTERS INC.', '004086': 'MICHELS & KLEBERHOFF COMPUTER', '004092': 'ASP COMPUTER PRODUCTS, INC.', '004068': 'EXTENDED SYSTEMS', '004078': 'WEARNES AUTOMATION PTE LTD', '0040F4': 'CAMEO COMMUNICATIONS, INC.', '0040B4': 'NEXTCOM K.K.', '0040B0': 'BYTEX CORPORATION, ENGINEERING', '0080D9': 'EMK Elektronik GmbH & Co. KG', '004059': 'YOSHIDA KOGYO K. K.', '004095': "R.P.T. INTERGROUPS INT'L LTD.", '004035': 'OPCOM', '00405C': 'FUTURE SYSTEMS, INC.', '004061': 'DATATECH ENTERPRISES CO., LTD.', '00008C': 'Alloy Computer Products (Australia) Pty Ltd', '0040B9': 'MACQ ELECTRONIQUE SA', '0040BB': 'GOLDSTAR CABLE CO., LTD.', '0040B1': 'CODONICS INC.', '0040F8': 'SYSTEMHAUS DISCOM', '0040D2': 'PAGINE CORPORATION', '004024': 'COMPAC INC.', '0040E9': 'ACCORD SYSTEMS, INC.', '004003': 'Emerson Process Management Power & Water Solutions, Inc.', '004090': 'ANSEL COMMUNICATIONS', '0040C5': 'MICOM COMMUNICATIONS INC.', '004020': 'CommScope Inc', '004048': 'SMD INFORMATICA S.A.', '00407C': 'QUME CORPORATION', '00407F': 'FLIR Systems', '00402D': 'HARRIS ADACOM CORPORATION', '0080A6': 'REPUBLIC TECHNOLOGY, INC.', '0040DE': 'Elsag Datamat spa', '0040C9': 'NCUBE', '008032': 'ACCESS CO., LTD.', '0080CF': 'EMBEDDED PERFORMANCE INC.', '008090': 'MICROTEK INTERNATIONAL, INC.', '004044': 'QNIX COMPUTER CO., LTD.', '0080C4': 'DOCUMENT TECHNOLOGIES, INC.', '00805B': 'CONDOR SYSTEMS, INC.', '008043': 'NETWORLD, INC.', '0040DF': 'DIGALOG SYSTEMS, INC.', '004009': 'TACHIBANA TECTRON CO., LTD.', '0040A0': 'GOLDSTAR CO., LTD.', '0040FC': 'IBR COMPUTER TECHNIK GMBH', '0080AF': 'ALLUMER CO., LTD.', '008084': 'THE CLOUD INC.', '0080F3': 'SUN ELECTRONICS CORP.', '008099': 'Eaton Industries GmbH', '00808D': 'WESTCOAST TECHNOLOGY B.V.', '0080BE': 'ARIES RESEARCH', '008015': 'SEIKO SYSTEMS, INC.', '0080D2': 'SHINNIHONDENKO CO., LTD.', '008089': 'TECNETICS (PTY) LTD.', '00806F': 'ONELAN LTD.', '008081': 'KENDALL SQUARE RESEARCH CORP.', '00809C': 'LUXCOM, INC.', '008065': 'CYBERGRAPHIC SYSTEMS PTY LTD.', '008019': 'DAYNA COMMUNICATIONS, INC.', '008050': 'ZIATECH CORPORATION', '0080A4': 'LIBERTY ELECTRONICS', '0080CD': 'MICRONICS COMPUTER, INC.', '008003': 'HYTEC ELECTRONICS LTD.', '008052': 'TECHNICALLY ELITE CONCEPTS', '00805D': 'CANSTAR', '00804F': 'DAIKIN INDUSTRIES, LTD.', '008005': 'CACTUS COMPUTER INC.', '00806D': 'CENTURY SYSTEMS CORP.', '008094': 'ALFA LAVAL AUTOMATION AB', '000041': 'ICE CORPORATION', '000086': 'MEGAHERTZ CORPORATION', '000092': 'COGENT DATA TECHNOLOGIES', '000058': 'RACORE COMPUTER PRODUCTS INC.', '008074': 'FISHER CONTROLS', '008030': 'NEXUS ELECTRONICS', '000055': 'COMMISSARIAT A L`ENERGIE ATOM.', '0080C9': 'ALBERTA MICROELECTRONIC CENTRE', '00800C': 'VIDECOM LIMITED', '00807D': 'EQUINOX SYSTEMS INC.', '008063': 'Hirschmann Automation and Control GmbH', '0080EE': 'THOMSON CSF', '00808E': 'RADSTONE TECHNOLOGY', '008096': 'HUMAN DESIGNED SYSTEMS, INC.', '0080DA': 'Bruel & Kjaer Sound & Vibration Measurement A/S', '00803E': 'SYNERNETICS', '0080CE': 'BROADCAST TELEVISION SYSTEMS', '00801A': 'BELL ATLANTIC', '0080DE': 'GIPSI S.A.', '008002': 'SATELCOM (UK) LTD', '008064': 'WYSE TECHNOLOGY LLC', '008048': 'COMPEX 
INCORPORATED', '008085': 'H-THREE SYSTEMS CORPORATION', '00804C': 'CONTEC CO., LTD.', '00808F': 'C. ITOH ELECTRONICS, INC.', '000052': 'Intrusion.com, Inc.', '0080FF': 'SOC. DE TELEINFORMATIQUE RTC', '0000F7': 'YOUTH KEEP ENTERPRISE CO LTD', '0000C7': 'ARIX CORPORATION', '0000D1': 'ADAPTEC INCORPORATED', '000016': 'DU PONT PIXEL SYSTEMS .', '0000E1': 'GRID SYSTEMS', '000081': 'Bay Networks', '000029': 'IMC NETWORKS CORP.', '00000A': 'OMRON TATEISI ELECTRONICS CO.', '00000D': 'FIBRONICS LTD.', '000024': 'CONNECT AS', '000067': 'SOFT * RITE, INC.', '0000D2': 'SBE, INC.', '000037': 'OXFORD METRICS LIMITED', '0000FB': 'RECHNER ZUR KOMMUNIKATION', '0000E2': 'ACER TECHNOLOGIES CORP.', '0000BA': 'SIIG, INC.', '00002A': 'TRW - SEDD/INP', '00002C': 'AUTOTOTE LIMITED', '000083': 'TADPOLE TECHNOLOGY PLC', '0000F3': 'GANDALF DATA LIMITED', '0000B0': 'RND-RAD NETWORK DEVICES', '0000CF': 'HAYES MICROCOMPUTER PRODUCTS', '000056': 'DR. B. STRUCK', '00006C': 'Private', '0000A9': 'NETWORK SYSTEMS CORP.', '0000EF': 'KTI', '000025': 'RAMTEK CORP.', '0000AF': 'Canberra Industries, Inc.', '000076': 'ABEKAS VIDEO SYSTEM', '080055': 'STANFORD TELECOMM. INC.', '080053': 'MIDDLE EAST TECH. UNIVERSITY', '08008E': 'Tandem Computers', '080084': 'TOMEN ELECTRONICS CORP.', '080085': 'ELXSI', '080082': 'VERITAS SOFTWARE', '080080': 'AES DATA INC.', '080077': 'TSL COMMUNICATIONS LTD.', '080074': 'CASIO COMPUTER CO. LTD.', '08006E': 'MASSCOMP', '080068': 'RIDGE COMPUTERS', '080063': 'PLESSEY', '08007B': 'SANYO ELECTRIC CO. LTD.', '08007C': 'VITALINK COMMUNICATIONS CORP.', '0000DF': 'BELL & HOWELL PUB SYS DIV', '0000F9': 'QUOTRON SYSTEMS INC.', '080060': 'INDUSTRIAL NETWORKING INC.', '08004C': 'HYDRA COMPUTER SYSTEMS INC.', '080047': 'SEQUENT COMPUTER SYSTEMS INC.', '08004A': 'BANYAN SYSTEMS INC.', '080044': 'DAVID SYSTEMS INC.', '080041': 'RACAL-MILGO INFORMATION SYS..', '080037': 'FUJI-XEROX CO. LTD.', '080035': 'MICROFIVE CORPORATION', '080032': 'TIGAN INCORPORATED', '08008D': 'XYVISION INC.', '080015': 'STC BUSINESS SYSTEMS', '080042': 'JAPAN MACNICS CORP.', '080066': 'AGFA CORPORATION', '080004': 'CROMEMCO INCORPORATED', '08003F': 'FRED KOSCHARA ENTERPRISES', '08000B': 'UNISYS CORPORATION', '00DD01': 'UNGERMANN-BASS INC.', '00DD06': 'UNGERMANN-BASS INC.', 'AA0003': 'DIGITAL EQUIPMENT CORPORATION', '000000': 'XEROX CORPORATION', '080021': '3M COMPANY', '02BB01': 'OCTOTHORPE CORP.', '08008A': 'PerfTech, Inc.', '00003E': 'SIMPACT', '00DD02': 'UNGERMANN-BASS INC.', '00DD04': 'UNGERMANN-BASS INC.', '026086': 'LOGIC REPLACEMENT TECH. 
LTD.', '74DA88': 'TP-LINK TECHNOLOGIES CO.,LTD.', 'CC32E5': 'TP-LINK TECHNOLOGIES CO.,LTD.', '1C3BF3': 'TP-LINK TECHNOLOGIES CO.,LTD.', '3C86D1': 'vivo Mobile Communication Co., Ltd.', '301B97': 'Lierda Science & Technology Group Co.,Ltd', '2CD2E3': 'Guangzhou Aoshi Electronic Co.,Ltd', '5859C2': 'Aerohive Networks Inc.', '1459C3': 'Creative Chips GmbH', 'A0D86F': 'ARGO AI, LLC', 'E4671E': 'SHEN ZHEN NUO XIN CHENG TECHNOLOGY co., Ltd.', '682719': 'Microchip Technology Inc.', '24C17A': 'BEIJING IACTIVE NETWORK CO.,LTD', '1C9C8C': 'Juniper Networks', 'A4C939': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', '34D772': 'Xiamen Yudian Automation Technology Co., Ltd ', 'C0DCDA': 'Samsung Electronics Co.,Ltd', '04B429': 'Samsung Electronics Co.,Ltd', '48794D': 'Samsung Electronics Co.,Ltd', '44F971': 'SHENZHEN MERCURY COMMUNICATION TECHNOLOGIES CO.,LTD.', '18F9C4': 'BAE Systems', '60ABD2': 'Bose Corporation', 'F0EF86': 'Google, Inc.', 'E4C0CC': 'China Mobile Group Device Co.,Ltd.', '5CB13E': 'Sagemcom Broadband SAS', 'F4E5F2': 'HUAWEI TECHNOLOGIES CO.,LTD', '541310': 'HUAWEI TECHNOLOGIES CO.,LTD', '8CE5EF': 'HUAWEI TECHNOLOGIES CO.,LTD', 'A4CD23': 'Shenzhenshi Xinzhongxin Co., Ltd', 'B83A5A': 'Aruba, a Hewlett Packard Enterprise Company', 'E4AAEA': 'Liteon Technology Corporation', 'A0946A': 'Shenzhen XGTEC Technology Co,.Ltd.', '1C2AA3': 'Shenzhen HongRui Optical Technology Co., Ltd.', '388E7A': 'AUTOIT', '4C710C': 'Cisco Systems, Inc', '4C710D': 'Cisco Systems, Inc', '9C31C3': 'BSkyB Ltd', '6C24A6': 'vivo Mobile Communication Co., Ltd.', '9C5F5A': 'GUANGDONG OPPO MOBILE TELECOMMUNICATIONS CORP.,LTD', 'E447B3': 'zte corporation', 'FCDB21': 'SAMSARA NETWORKS INC', '607771': 'Texas Instruments', 'B4E8C9': 'XADA Technologies', '7C210E': 'Cisco Systems, Inc', '44DC4E': 'ITEL MOBILE LIMITED', 'D8CF89': 'Beijing DoSee Science and Technology Co., Ltd.', '04AAE1': 'BEIJING MICROVISION TECHNOLOGY CO.,LTD', '382A19': 'Technica Engineering GmbH', '74D654': 'GINT'}

def int_list(liste: list):
    """Converts every item of liste to int; returns False if any item is not an integer."""
    l = []
    for i in liste:
        try:
            l.append(int(i))
        except ValueError:
            return False
    return l

def get_local_ip():
    # Connecting a UDP socket to an unroutable address is a portable way to
    # discover the local interface IP without sending any packets.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(('10.255.255.255', 1))
        ip = s.getsockname()[0]
    except OSError:
        ip = False
    finally:
        s.close()
    return ip

def test_internet():
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect(("google.com", 80))
        test = True
    except OSError:
        test = False
    finally:
        s.close()
    return test

if not test_internet():
    print("** No internet")

# Variables, basic function calls and argument parsing
# retreive_vendors()
local_ip = get_local_ip()  # -> 10.10.10.10
third = ".".join(local_ip.split(".")[0:-1]) + "."  # -> 10.10.10.
first_and_second = ".".join(local_ip.split(".")[:2]) + "."  # -> 10.10.
result = {}
streds = []
result_length = int
scan_duration = float

if "-" not in args.hostnumber:
    exit("*Hostnumber must be a range, e.g. 1-66")
hostnumber = int_list(args.hostnumber.split("-"))
all_network = args.all
subnetwork = args.subnetwork
pretty_print = args.prettyprint
ping_amount = '2' if args.ping is None else args.ping
ping_arg = "-c" if platform != "win32" else "-n"

# Conditions to run the program
if all_network and subnetwork:
    exit("Arguments all and subnetwork can't be used at the same time")
if not hostnumber:
    exit("*hostnumber requires integers")
if hostnumber[0] > hostnumber[1]:
    exit("*First part of '1-25' for hostnumber must be less than the second one")
if hostnumber[0] < 0 or hostnumber[1] > 256:
    exit("*Hostnumber must be between 0 and 256")

# Other function definitions
def ping(ip: str):
    # A host is considered up when the ping output contains a TTL value.
    output = subprocess.Popen(['ping', ping_arg, ping_amount, ip],
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
    if 'ttl' in str(output) or 'TTL' in str(output):
        result[ip] = {}

def nslookup(ip: str):
    # Get hostnames with nslookup; the output contains a '*' when the reverse lookup fails.
    dns_lookup = subprocess.Popen(['nslookup', ip],
                                  stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    if b"*" not in dns_lookup[1] and b"*" not in dns_lookup[0]:
        if b"\t" in dns_lookup[0]:
            result[ip]["name"] = dns_lookup[0].split(b"\t")[-1].split(b"=")[1].replace(b".\n", b"").replace(b" ", b"").replace(b"\n", b"")
        else:
            result[ip]["name"] = dns_lookup[0].split(b":")[-2].replace(b" ", b"").split(b"\n")[0].replace(b"\r", b"")

def _get_mac_vendor(mac: str):
    if mac is None:
        return None
    # The first three octets of a MAC address form the OUI that identifies the vendor.
    oui = "".join(mac.upper().split(":")[:3])
    return vendors.get(oui)

def mac(ip: str):
    _mac = getmac.get_mac_address(ip=ip)
    if _mac is None:  # retry once; getmac occasionally misses on the first call
        _mac = getmac.get_mac_address(ip=ip)
    result[ip]["mac"] = str(_mac)
    result[ip]["vendor"] = str(_get_mac_vendor(_mac))

def repeated(target):
    for ipadr in list(result.keys()):
        stred = threading.Thread(target=target, args=[ipadr])
        streds.append(stred)
        stred.start()
    for x in streds:
        x.join()

if subnetwork is not None:
    if "-" in subnetwork:
        subnetwork = int_list(subnetwork.split("-"))
        if not subnetwork:
            exit("*SUBNETWORK requires integers")
        if len(subnetwork) != 2:
            exit("*SUBNETWORK must be a single range, e.g. 1-25, not 1-25-25")
        if subnetwork[0] > subnetwork[1]:
            exit("*First part of '1-25' for subnetwork must be less than the second one")
        if subnetwork[0] < 0 or subnetwork[1] > 256:
            exit("*Subnetwork must be between 0 and 256")
        # Scan: 10.10.x.1 to 10.10.x.1/25
        for subnet in range(subnetwork[0], subnetwork[1]):
            thr = first_and_second + str(subnet) + "."
            for i in range(hostnumber[0], hostnumber[1]):
                stred = threading.Thread(target=ping, args=[thr + str(i)])
                streds.append(stred)
                stred.start()
        for x in streds:
            x.join()
        repeated(nslookup)
        repeated(mac)
    elif "," in subnetwork:
        subnetwork = int_list(subnetwork.split(","))
        if not subnetwork:
            exit("*SUBNETWORK requires integers")
        for subnet in subnetwork:
            if subnet < 1 or subnet > 256:
                exit("*Each subnetwork value must be between 1 and 256")
            # Scan: 10.10.x.1 to 10.10.x.1/25 for each listed subnetwork, e.g. 1,25,50
            thr = first_and_second + str(subnet) + "."  # 10.10.x.
            for i in range(hostnumber[0], hostnumber[1]):
                stred = threading.Thread(target=ping, args=[thr + str(i)])
                streds.append(stred)
                stred.start()
        for x in streds:
            x.join()
        repeated(nslookup)
        repeated(mac)
    else:
        try:
            subnetwork = int(subnetwork)
        except ValueError:
            exit("*SUBNETWORK requires integers")
        if subnetwork < 1 or subnetwork > 256:
            exit("*The subnetwork argument must be between 1 and 256")
        thr = first_and_second + str(subnetwork) + "."
        for i in range(int(hostnumber[0]), int(hostnumber[1])):
            stred = threading.Thread(target=ping, args=[thr + str(i)])
            streds.append(stred)
            stred.start()
        for x in streds:
            x.join()
        repeated(nslookup)
        repeated(mac)

if all_network:
    # Scan 10.10.0.1 to 10.10.255.1: ping hosts x to y in every subnetwork
    for subnet in range(1, 255):
        thr = first_and_second + str(subnet) + "."  # -> '10.10.' + '1' + '.'
        for i in range(int(hostnumber[0]), int(hostnumber[1])):
            # print(thr + str(i))  # -> prints the ip to be scanned
            stred = threading.Thread(target=ping, args=[thr + str(i)])
            streds.append(stred)
            stred.start()
    for x in streds:
        x.join()
    repeated(nslookup)
    repeated(mac)

if pretty_print:
    import pprint
    pprint.pprint(result)
else:
    print(result)
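# ---------------------------------------------------------------------------
# A minimal sketch of the argparse setup this scanner assumes. The real parser
# is defined earlier in the file, so the flag spellings and help texts below
# are assumptions inferred from the args.* attributes used above (hostnumber,
# subnetwork, all, ping, prettyprint); only those dest names are certain.
# Kept commented out so it does not redefine args at the end of the script.
#
# import argparse
# parser = argparse.ArgumentParser(description="Threaded ping/nslookup/MAC vendor LAN scanner")
# parser.add_argument("-hostnumber", required=True, help="host range to ping, e.g. 1-66")
# parser.add_argument("-subnetwork", help="third octet: a value (25), a range (1-25) or a list (1,25,50)")
# parser.add_argument("-all", action="store_true", help="scan subnetworks 1 through 254")
# parser.add_argument("-ping", help="echo requests per host (default: 2)")
# parser.add_argument("-prettyprint", action="store_true", help="pretty-print the result dict")
# args = parser.parse_args()
#
# Example: python scanner.py -hostnumber 1-66 -subnetwork 1-25 -prettyprint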
monitor.py
import sys
sys.path.append(r"/home/anoldfriend/OpenFOAM/anoldfriend-7/utilities/")
import signal
import multiprocessing as mp
import time
from residual_monitor import read_residuals, plot_multiple_residuals, quit

log = "run.log"
pressure_name = "p_rgh"
nCorrectors = 1
interval = 1
sample_size = 200

# m_residuals = [["h"], ["Ux", "Uy", pressure_name]]
# m_residuals = [["h"], ["Ux", pressure_name]]
m_residuals = [["h", "CO2", "O2"]]
m_thresholds = [[1e-1, 1e-4, 1e-5, 1e-6, 1e-7]]
m_save_files = ["residuals1.jpg"]

def process_fun():
    line_offset = 0
    iterations_offset = 0
    while True:
        df, line_offset, iterations, info = read_residuals(log, line_offset, pressure_name,
                                                           nCorrectors, sample_size)
        physical_time = info.get("cum_physical_time", "not found")
        execution_time = info.get("cum_execution_time", "not found")
        title = f"physical time : {physical_time} s, execution time : {execution_time} s"
        titles = [title] * len(m_residuals)

        delta_time = info.get("latest_delta_time", "not found")
        maxCo = info.get("maxCo", "not found")
        meanCo = info.get("meanCo", "not found")
        text = f"latest_delta_time: {delta_time} s \n" + \
               f"mean CFL num: {meanCo}\n" + \
               f"max CFL num: {maxCo}"
        texts = [text] * len(m_residuals)

        plot_multiple_residuals(df, iterations_offset, m_residuals, m_thresholds, titles, texts, m_save_files)
        iterations_offset += iterations
        time.sleep(interval)

if __name__ == "__main__":
    try:
        signal.signal(signal.SIGINT, quit)
        signal.signal(signal.SIGTERM, quit)
        p = mp.Process(target=process_fun)
        p.daemon = True  # must be set before start(); the original "p.deamon" after start() had no effect
        p.start()
        p.join()  # blocks without the busy-wait of "while True: pass"; signal handlers still fire
    except Exception as err:
        print(f"Error Message: {err}")
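# ---------------------------------------------------------------------------
# Note on the daemon fix above: multiprocessing only honors process.daemon when
# it is set before the process starts (setting it afterwards raises an error),
# and the misspelled "p.deamon" simply created an unused attribute. A minimal
# self-contained illustration with a hypothetical heartbeat function, not part
# of this monitor:
#
# import multiprocessing as mp
# import time
#
# def heartbeat():
#     while True:
#         print("tick")
#         time.sleep(1)
#
# if __name__ == "__main__":
#     p = mp.Process(target=heartbeat)
#     p.daemon = True   # set BEFORE start()
#     p.start()
#     time.sleep(3)     # the daemon child is terminated when the parent exits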
player_blink_gui.py
import os
os.environ["OPENCV_VIDEOIO_MSMF_ENABLE_HW_TRANSFORMS"] = "0"
import cv2
import heapq
import json
import os.path
import signal
import sys
import threading
import time
import tkinter as tk
import tkinter.filedialog as fd
from tkinter import ttk
from os import listdir
from os.path import isfile, join
from PIL import Image, ImageTk

os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))

import rngtool
from xorshift import Xorshift


class Application(tk.Frame):
    def __init__(self, master=None):
        super().__init__(master)
        self.master = master
        self.rng = None
        self.previewing = False
        self.monitoring = False
        self.reidentifying = False
        self.tidsiding = False
        self.timelining = False
        self.config_json = {}
        self.default_config = {
            "MonitorWindow": True,
            "WindowPrefix": "SysDVR-Client [PID ",
            "image": "./images/cave/eye.png",
            "view": [0, 0, 0, 0],
            "thresh": 0.9,
            "white_delay": 0.0,
            "advance_delay": 0,
            "advance_delay_2": 0,
            "npc": 0,
            "timeline_npc": 0,
            "pokemon_npc": 0,
            "crop": [0, 0, 0, 0],
            "camera": 0,
            "display_percent": 80
        }
        self.pack()
        self.create_widgets()
        signal.signal(signal.SIGINT, self.signal_handler)

    def update_configs(self, event=None):
        self.config_jsons = [f for f in listdir("configs") if isfile(join("configs", f))]
        self.config_combobox['values'] = self.config_jsons

    def create_widgets(self):
        self.master.title("Player Blink")

        ttk.Label(self, text="Progress:").grid(column=0, row=0)
        ttk.Label(self, text="S[0-3]:").grid(column=0, row=3)
        ttk.Label(self, text="S[0-1]:").grid(column=0, row=7)
        ttk.Label(self, text="Advances:").grid(column=0, row=10)
        ttk.Label(self, text="Timer:").grid(column=0, row=11)
        ttk.Label(self, text="X to advance:").grid(column=0, row=12)

        self.progress = ttk.Label(self, text="0/0")
        self.progress.grid(column=1, row=0)

        self.config_combobox = ttk.Combobox(self, state="readonly", values=[])
        self.config_combobox.grid(column=2, row=0)
        self.config_combobox.bind("<<ComboboxSelected>>", self.config_combobox_onchange)
        self.config_combobox.bind("<Button-1>", self.update_configs)
        self.update_configs()

        self.new_config_button = ttk.Button(self, text="+", command=self.new_config, width=2)
        self.new_config_button.grid(column=3, row=0, columnspan=2)

        self.eye_display = ttk.Label(self)
        self.eye_display.grid(column=2, row=1)

        self.prefix_input = ttk.Entry(self)
        self.prefix_input.grid(column=2, row=2)

        ttk.Label(self, text="Camera:").grid(column=3, row=1)
        self.camera_index = tk.Spinbox(self, from_=0, to=99, width=5)
        self.camera_index.grid(column=4, row=1)

        self.monitor_window_var = tk.IntVar()
        self.monitor_window = ttk.Checkbutton(self, text="Monitor Window", variable=self.monitor_window_var)
        self.monitor_window.grid(column=3, row=2, columnspan=2)

        self.monitor_display_buffer = ttk.Label(self)
        self.monitor_display_buffer.grid(column=2, row=3, rowspan=64, columnspan=2)
        self.monitor_display = ttk.Label(self)
        self.monitor_display.grid(column=2, row=3, rowspan=64, columnspan=2)

        self.monitor_blink_button = ttk.Button(self, text="Monitor Blinks", command=self.monitor_blinks)
        self.monitor_blink_button.grid(column=5, row=0)
        self.reidentify_button = ttk.Button(self, text="Reidentify", command=self.reidentify)
        self.reidentify_button.grid(column=5, row=1)
        self.preview_button = ttk.Button(self, text="Preview", command=self.preview)
        self.preview_button.grid(column=5, row=2)
        self.stop_tracking_button = ttk.Button(self, text="Stop Tracking", command=self.stop_tracking)
        self.stop_tracking_button.grid(column=5, row=3)
        self.timeline_button = ttk.Button(self, text="Timeline", command=self.timeline)
        self.timeline_button.grid(column=5, row=4)
        self.tidsid_button = ttk.Button(self, text="TID/SID", command=self.tidsid)
        self.tidsid_button.grid(column=5, row=5)

        x = y = w = h = 0
        th = 0.9

        ttk.Label(self, text="X").grid(column=6, row=1)
        ttk.Label(self, text="Y").grid(column=6, row=2)
        ttk.Label(self, text="W").grid(column=6, row=3)
        ttk.Label(self, text="H").grid(column=6, row=4)
        ttk.Label(self, text="Threshold").grid(column=6, row=5)
        ttk.Label(self, text="Time Delay").grid(column=6, row=6)
        ttk.Label(self, text="Advance Delay").grid(column=6, row=7)
        ttk.Label(self, text="Advance Delay 2").grid(column=6, row=8)
        ttk.Label(self, text="NPCs").grid(column=6, row=9)
        ttk.Label(self, text="NPCs during Timeline").grid(column=6, row=10)
        ttk.Label(self, text="Pokemon NPCs").grid(column=6, row=11)

        self.menu_check_var = tk.IntVar()
        self.menu_check = ttk.Checkbutton(self, text="+1 on menu close", variable=self.menu_check_var)
        self.menu_check.grid(column=7, row=0)
        self.menu_check_var.set(1)

        self.reident_noisy_check_var = tk.IntVar()
        self.reident_noisy_check = ttk.Checkbutton(self, text="Reident 1 PK NPC", variable=self.reident_noisy_check_var)
        self.reident_noisy_check.grid(column=5, row=6)
        self.reident_noisy_check_var.set(0)

        self.pos_x = tk.Spinbox(self, from_=0, to=99999, width=5)
        self.pos_x.grid(column=7, row=1)
        self.pos_y = tk.Spinbox(self, from_=0, to=99999, width=5)
        self.pos_y.grid(column=7, row=2)
        self.pos_w = tk.Spinbox(self, from_=0, to=99999, width=5)
        self.pos_w.grid(column=7, row=3)
        self.pos_h = tk.Spinbox(self, from_=0, to=99999, width=5)
        self.pos_h.grid(column=7, row=4)
        self.pos_th = tk.Spinbox(self, from_=0, to=1, width=5, increment=0.1)
        self.pos_th.grid(column=7, row=5)
        self.whi_del = tk.Spinbox(self, from_=0, to=999, width=5, increment=0.1)
        self.whi_del.grid(column=7, row=6)
        self.adv_del = tk.Spinbox(self, from_=0, to=999, width=5, increment=1)
        self.adv_del.grid(column=7, row=7)
        self.adv_del_2 = tk.Spinbox(self, from_=0, to=999, width=5, increment=1)
        self.adv_del_2.grid(column=7, row=8)
        self.npc = tk.Spinbox(self, from_=0, to=999, width=5, increment=1)
        self.npc.grid(column=7, row=9)
        self.timeline_npc = tk.Spinbox(self, from_=-1, to=999, width=5, increment=1)
        self.timeline_npc.grid(column=7, row=10)
        self.pokemon_npc = tk.Spinbox(self, from_=0, to=999, width=5, increment=1)
        self.pokemon_npc.grid(column=7, row=11)

        self.new_eye_button = ttk.Button(self, text="Select Eye", command=self.new_eye)
        self.new_eye_button.grid(column=6, row=12, columnspan=2)
        self.save_button = ttk.Button(self, text="Save Config", command=self.save_config)
        self.save_button.grid(column=6, row=13, columnspan=2)
        self.raw_screenshot_button = ttk.Button(self, text="Raw Screenshot", command=self.save_screenshot)
        self.raw_screenshot_button.grid(column=6, row=14, columnspan=2)

        self.s0_1_2_3 = tk.Text(self, width=10, height=4)
        self.s0_1_2_3.grid(column=1, row=2, rowspan=4)
        self.s01_23 = tk.Text(self, width=20, height=2)
        self.s01_23.grid(column=1, row=6, rowspan=4)

        self.advances = 0
        self.adv = ttk.Label(self, text=self.advances)
        self.adv.grid(column=1, row=10)
        self.count_down = 0
        self.cd = ttk.Label(self, text=self.count_down)
        self.cd.grid(column=1, row=11)

        self.advances_increase = tk.Spinbox(self, from_=0, to=999999)
        self.advances_increase.grid(column=1, row=12)
        self.advances_increase_button = ttk.Button(self, text="Advance", command=self.increase_advances)
        self.advances_increase_button.grid(column=1, row=13)

        ttk.Label(self, text="Display Percent").grid(column=0, row=14)
        self.display_percent = tk.Spinbox(self, from_=0, to=500)
        self.display_percent.grid(column=1, row=14)

        self.pos_x.delete(0, tk.END)
        self.pos_x.insert(0, x)
        self.pos_y.delete(0, tk.END)
        self.pos_y.insert(0, y)
        self.pos_w.delete(0, tk.END)
        self.pos_w.insert(0, w)
        self.pos_h.delete(0, tk.END)
        self.pos_h.insert(0, h)
        self.pos_th.delete(0, tk.END)
        self.pos_th.insert(0, th)
        self.whi_del.delete(0, tk.END)
        self.whi_del.insert(0, 0.0)
        self.adv_del.delete(0, tk.END)
        self.adv_del.insert(0, 0)
        self.adv_del_2.delete(0, tk.END)
        self.adv_del_2.insert(0, 0)
        self.npc.delete(0, tk.END)
        self.npc.insert(0, 0)
        self.timeline_npc.delete(0, tk.END)
        self.timeline_npc.insert(0, 0)
        self.pokemon_npc.delete(0, tk.END)
        self.pokemon_npc.insert(0, 0)
        self.camera_index.delete(0, tk.END)
        self.camera_index.insert(0, 0)
        self.advances_increase.delete(0, tk.END)
        self.advances_increase.insert(0, 165)
        self.display_percent.delete(0, tk.END)
        self.display_percent.insert(0, 100)

        self.after_task()

    def increase_advances(self):
        plus = int(self.advances_increase.get())
        self.rng.advance(plus)
        self.advances += plus

    def new_config(self):
        with fd.asksaveasfile(initialdir="./configs/", filetypes=[("JSON", ".json")]) as f:
            json.dump(self.default_config, f, indent=4)
        self.config_combobox.set(os.path.basename(f.name))
        self.config_combobox_onchange()

    def save_screenshot(self):
        with fd.asksaveasfile(initialdir="./", filetypes=[("PNG", ".png")]) as f:
            cv2.imwrite(f.name, self.raw_screenshot)

    def new_eye(self):
        self.config_json["image"] = "./" + os.path.relpath(fd.askopenfilename(initialdir="./images/", filetypes=[("Image", ".png")])).replace("\\", "/")
        self.player_eye = cv2.imread(self.config_json["image"], cv2.IMREAD_GRAYSCALE)
        self.player_eye_tk = self.cv_image_to_tk(self.player_eye)
        self.eye_display['image'] = self.player_eye_tk

    def save_config(self):
        json.dump(self.config_json, open(join("configs", self.config_combobox.get()), "w"), indent=4)

    def cv_image_to_tk(self, image):
        split = cv2.split(image)
        if len(split) == 3:
            b, g, r = split
            image = cv2.merge((r, g, b))
        im = Image.fromarray(image)
        return ImageTk.PhotoImage(image=im)

    def config_combobox_onchange(self, event=None):
        self.config_json = json.load(open(join("configs", self.config_combobox.get())))
        missing = set(self.default_config.keys()).difference(self.config_json.keys())
        if len(missing) > 0:
            print(f"Config was missing the following keys {missing}\nDefaults have been added")
            for key in missing:
                self.config_json[key] = self.default_config[key]
        x, y, w, h = self.config_json["view"]
        self.pos_x.delete(0, tk.END)
        self.pos_x.insert(0, x)
        self.pos_y.delete(0, tk.END)
        self.pos_y.insert(0, y)
        self.pos_w.delete(0, tk.END)
        self.pos_w.insert(0, w)
        self.pos_h.delete(0, tk.END)
        self.pos_h.insert(0, h)
        self.pos_th.delete(0, tk.END)
        self.pos_th.insert(0, self.config_json["thresh"])
        self.whi_del.delete(0, tk.END)
        self.whi_del.insert(0, self.config_json["white_delay"])
        self.adv_del.delete(0, tk.END)
        self.adv_del.insert(0, self.config_json["advance_delay"])
        self.adv_del_2.delete(0, tk.END)
        self.adv_del_2.insert(0, self.config_json["advance_delay_2"])
        self.npc.delete(0, tk.END)
        self.npc.insert(0, self.config_json["npc"])
        self.pokemon_npc.delete(0, tk.END)
        self.pokemon_npc.insert(0, self.config_json["pokemon_npc"])
        self.timeline_npc.delete(0, tk.END)
        self.timeline_npc.insert(0, self.config_json["timeline_npc"])
        self.camera_index.delete(0, tk.END)
        self.camera_index.insert(0, self.config_json["camera"])
        self.player_eye = cv2.imread(self.config_json["image"], cv2.IMREAD_GRAYSCALE)
        self.player_eye_tk = self.cv_image_to_tk(self.player_eye)
        self.eye_display['image'] = self.player_eye_tk
        self.prefix_input.delete(0, tk.END)
        self.prefix_input.insert(0, self.config_json["WindowPrefix"])
        self.monitor_window_var.set(self.config_json["MonitorWindow"])
        self.display_percent.delete(0, tk.END)
        self.display_percent.insert(0, self.config_json["display_percent"])

    def stop_tracking(self):
        self.tracking = False

    def timeline(self):
        self.timelining = True

    def monitor_blinks(self):
        if not self.monitoring:
            self.monitor_blink_button['text'] = "Stop Monitoring"
            self.monitoring = True
            self.monitoring_thread = threading.Thread(target=self.monitoring_work)
            self.monitoring_thread.daemon = True
            self.monitoring_thread.start()
        else:
            self.monitor_blink_button['text'] = "Monitor Blinks"
            self.monitoring = False

    def reidentify(self):
        if not self.reidentifying:
            self.reidentify_button['text'] = "Stop Reidentifying"
            self.reidentifying = True
            self.reidentifying_thread = threading.Thread(target=self.reidentifying_work)
            self.reidentifying_thread.daemon = True
            self.reidentifying_thread.start()
        else:
            self.reidentify_button['text'] = "Reidentify"
            self.reidentifying = False

    def tidsid(self):
        if not self.tidsiding:
            self.tidsid_button['text'] = "Stop TID/SID"
            self.tidsiding = True
            self.tidsiding_thread = threading.Thread(target=self.tidsiding_work)
            self.tidsiding_thread.daemon = True
            self.tidsiding_thread.start()
        else:
            self.tidsid_button['text'] = "TID/SID"
            self.tidsiding = False
    def monitoring_work(self):
        self.tracking = False
        blinks, intervals, offset_time = rngtool.tracking_blink(self.player_eye, *self.config_json["view"], MonitorWindow=self.config_json["MonitorWindow"], WindowPrefix=self.config_json["WindowPrefix"], crop=self.config_json["crop"], camera=self.config_json["camera"], tk_window=self, th=self.config_json["thresh"])
        self.rng = rngtool.recov(blinks, intervals, npc=self.config_json["npc"])

        self.monitor_blink_button['text'] = "Monitor Blinks"
        self.monitoring = False
        self.preview()

        waituntil = time.perf_counter()
        diff = round(waituntil - offset_time) + (1 if self.menu_check_var.get() else 0)
        self.rng.getNextRandSequence(diff * (self.config_json["npc"] + 1))
        state = self.rng.getState()
        s0 = f"{state[0]:08X}"
        s1 = f"{state[1]:08X}"
        s2 = f"{state[2]:08X}"
        s3 = f"{state[3]:08X}"
        s01 = s0 + s1
        s23 = s2 + s3
        print(s01, s23)
        print(s0, s1, s2, s3)
        self.s0_1_2_3.delete(1.0, tk.END)
        self.s01_23.delete(1.0, tk.END)
        self.s0_1_2_3.insert(1.0, s0 + "\n" + s1 + "\n" + s2 + "\n" + s3)
        self.s01_23.insert(1.0, s01 + "\n" + s23)

        self.advances = 0
        self.tracking = True
        self.count_down = None
        while self.tracking:
            if self.count_down is None:
                if self.timelining:
                    self.count_down = 10
            elif self.count_down != 0:
                self.count_down -= 1
                print(self.count_down + 1)
            else:
                break
            self.advances += self.config_json["npc"] + 1
            r = self.rng.getNextRandSequence(self.config_json["npc"] + 1)[-1]
            waituntil += 1.018
            print(f"advances:{self.advances}, blinks:{hex(r & 0xF)}")
            next_time = waituntil - time.perf_counter() or 0
            time.sleep(next_time)

        if self.timelining:
            self.rng.next()  # white screen
            time.sleep(self.config_json["white_delay"])
            waituntil = time.perf_counter()
            self.rng.advance(self.config_json["advance_delay"])
            self.advances += self.config_json["advance_delay"]
            print("blink timeline started")
            queue = []
            for _ in range(self.config_json["timeline_npc"] + 1):
                heapq.heappush(queue, (waituntil + 1.017, 0))
            for _ in range(self.config_json["pokemon_npc"]):
                blink_int = self.rng.rangefloat(3, 12) + 0.285
                heapq.heappush(queue, (waituntil + blink_int, 1))
            self.count_down = 10
            while queue and self.tracking:
                self.advances += 1
                w, q = heapq.heappop(queue)
                next_time = w - time.perf_counter() or 0
                if next_time > 0:
                    time.sleep(next_time)
                if self.config_json["advance_delay_2"] != 0:
                    if self.count_down > 0:
                        self.count_down -= 1
                        print(self.count_down + 1)
                    elif self.count_down != -1:
                        self.count_down -= 1
                        self.advances += self.config_json["advance_delay_2"]
                        self.rng.advance(self.config_json["advance_delay_2"])
                if q == 0:
                    r = self.rng.next()
                    print(f"advances:{self.advances}, blink:{hex(r & 0xF)}")
                    heapq.heappush(queue, (w + 1.017, 0))
                else:
                    blink_int = self.rng.rangefloat(3, 12) + 0.285
                    heapq.heappush(queue, (w + blink_int, 1))
                    print(f"advances:{self.advances}, interval:{blink_int}")
            self.timelining = False

    def tidsiding_work(self):
        self.tracking = False
        munchlax_intervals = rngtool.tracking_poke_blink(self.player_eye, *self.config_json["view"], MonitorWindow=self.config_json["MonitorWindow"], WindowPrefix=self.config_json["WindowPrefix"], crop=self.config_json["crop"], camera=self.config_json["camera"], tk_window=self, th=self.config_json["thresh"], size=64)
        self.rng = rngtool.recovByMunchlax(munchlax_intervals)
        state = self.rng.getState()
        self.tidsid_button['text'] = "TID/SID"
        self.tidsiding = False
        self.preview()
        s0 = f"{state[0]:08X}"
        s1 = f"{state[1]:08X}"
        s2 = f"{state[2]:08X}"
        s3 = f"{state[3]:08X}"
        s01 = s0 + s1
        s23 = s2 + s3
        print(s01, s23)
        print(s0, s1, s2, s3)
        self.s0_1_2_3.delete(1.0, tk.END)
        self.s01_23.delete(1.0, tk.END)
        self.s0_1_2_3.insert(1.0, s0 + "\n" + s1 + "\n" + s2 + "\n" + s3)
        self.s01_23.insert(1.0, s01 + "\n" + s23)
        waituntil = time.perf_counter()
        ts = time.time()
        print([hex(x) for x in state], ts)
        self.tracking = True
        while self.tracking:
            self.advances += 1
            interval = self.rng.rangefloat(3.0, 12.0) + 0.285
            waituntil += interval
            print(f"advances:{self.advances}")
            next_time = waituntil - time.perf_counter() or 0
            time.sleep(next_time)

    def reidentifying_work(self):
        self.tracking = False
        state = [int(x, 16) for x in self.s0_1_2_3.get(1.0, tk.END).split("\n")[:4]]
        s0 = f"{state[0]:08X}"
        s1 = f"{state[1]:08X}"
        s2 = f"{state[2]:08X}"
        s3 = f"{state[3]:08X}"
        s01 = s0 + s1
        s23 = s2 + s3
        print(s01, s23)
        print(s0, s1, s2, s3)
        self.s0_1_2_3.delete(1.0, tk.END)
        self.s01_23.delete(1.0, tk.END)
        self.s0_1_2_3.insert(1.0, s0 + "\n" + s1 + "\n" + s2 + "\n" + s3)
        self.s01_23.insert(1.0, s01 + "\n" + s23)
        print([hex(x) for x in state])
        if self.reident_noisy_check_var.get():
            self.pokemon_npc.delete(0, tk.END)
            self.pokemon_npc.insert(0, 1)
            observed_blinks, observed_intervals, offset_time = rngtool.tracking_blink(self.player_eye, *self.config_json["view"], MonitorWindow=self.config_json["MonitorWindow"], WindowPrefix=self.config_json["WindowPrefix"], crop=self.config_json["crop"], camera=self.config_json["camera"], tk_window=self, th=self.config_json["thresh"], size=20)
            self.rng, adv = rngtool.reidentifyByIntervalsNoisy(Xorshift(*state), observed_intervals)
            self.timelining = True
            self.count_down = 0
            auto_timeline = True
        else:
            observed_blinks, observed_intervals, offset_time = rngtool.tracking_blink(self.player_eye, *self.config_json["view"], MonitorWindow=self.config_json["MonitorWindow"], WindowPrefix=self.config_json["WindowPrefix"], crop=self.config_json["crop"], camera=self.config_json["camera"], tk_window=self, th=self.config_json["thresh"], size=7)
            self.rng, adv = rngtool.reidentifyByIntervals(Xorshift(*state), observed_intervals, return_advance=True, npc=self.config_json["npc"])
            auto_timeline = False

        self.reidentify_button['text'] = "Reidentify"
        self.reidentifying = False
        self.preview()

        waituntil = time.perf_counter()
        diff = round(waituntil - offset_time) + (1 if self.menu_check_var.get() else 0)
        self.rng.getNextRandSequence(diff * (self.config_json["npc"] + 1))
        state = self.rng.getState()
        self.advances = adv + diff * (self.config_json["npc"] + 1)
        self.tracking = True
        if not auto_timeline:
            self.count_down = None
        while self.tracking:
            if self.count_down is None:
                if self.timelining:
                    self.count_down = 10
            elif self.count_down != 0:
                self.count_down -= 1
                print(self.count_down + 1)
            else:
                break
            self.advances += self.config_json["npc"] + 1
            r = self.rng.getNextRandSequence(self.config_json["npc"] + 1)[-1]
            waituntil += 1.018
            print(f"advances:{self.advances}, blinks:{hex(r & 0xF)}")
            next_time = waituntil - time.perf_counter() or 0
            time.sleep(next_time)

        if self.timelining:
            self.rng.next()  # white screen
            time.sleep(self.config_json["white_delay"])
            waituntil = time.perf_counter()
            self.rng.advance(self.config_json["advance_delay"])
            self.advances += self.config_json["advance_delay"]
            print("blink timeline started")
            queue = []
            for _ in range(self.config_json["timeline_npc"] + 1):
                heapq.heappush(queue, (waituntil + 1.017, 0))
            for _ in range(self.config_json["pokemon_npc"]):
                blink_int = self.rng.rangefloat(3, 12) + 0.285
                heapq.heappush(queue, (waituntil + blink_int, 1))
            self.count_down = 10
            while queue and self.tracking:
                self.advances += 1
                w, q = heapq.heappop(queue)
                next_time = w - time.perf_counter() or 0
                if next_time > 0:
                    time.sleep(next_time)
                if self.config_json["advance_delay_2"] != 0:
                    if self.count_down > 0:
                        self.count_down -= 1
                        print(self.count_down + 1)
                    elif self.count_down != -1:
                        self.count_down -= 1
                        self.advances += self.config_json["advance_delay_2"]
                        self.rng.advance(self.config_json["advance_delay_2"])
                if q == 0:
                    r = self.rng.next()
                    print(f"advances:{self.advances}, blink:{hex(r & 0xF)}")
                    heapq.heappush(queue, (w + 1.017, 0))
                else:
                    blink_int = self.rng.rangefloat(3, 12) + 0.285
                    heapq.heappush(queue, (w + blink_int, 1))
                    print(f"advances:{self.advances}, interval:{blink_int}")
            self.timelining = False

    def preview(self):
        if not self.previewing:
            self.preview_button['text'] = "Stop Preview"
            self.previewing = True
            self.previewing_thread = threading.Thread(target=self.previewing_work)
            self.previewing_thread.daemon = True
            self.previewing_thread.start()
        else:
            self.preview_button['text'] = "Preview"
            self.previewing = False

    def previewing_work(self):
        last_frame_tk = None
        last_camera = self.config_json["camera"]
        if self.config_json["MonitorWindow"]:
            from windowcapture import WindowCapture
            video = WindowCapture(self.config_json["WindowPrefix"], self.config_json["crop"])
        else:
            if sys.platform.startswith('linux'):  # all Linux
                backend = cv2.CAP_V4L
            elif sys.platform.startswith('win'):  # MS Windows
                backend = cv2.CAP_DSHOW
            elif sys.platform.startswith('darwin'):  # macOS
                backend = cv2.CAP_ANY
            else:
                backend = cv2.CAP_ANY  # auto-detect via OpenCV
            video = cv2.VideoCapture(self.config_json["camera"], backend)
            video.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
            video.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
            video.set(cv2.CAP_PROP_BUFFERSIZE, 1)
            print(f"camera {self.config_json['camera']}")

        while self.previewing:
            if self.config_json["camera"] != last_camera:
                video = cv2.VideoCapture(self.config_json["camera"], backend)
                video.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
                video.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
                video.set(cv2.CAP_PROP_BUFFERSIZE, 1)
                print(f"camera {self.config_json['camera']}")
                last_camera = self.config_json["camera"]
            eye = self.player_eye
            w, h = eye.shape[::-1]
            roi_x, roi_y, roi_w, roi_h = self.config_json["view"]
            _, frame = video.read()
            if frame is not None:
                roi = cv2.cvtColor(frame[roi_y:roi_y + roi_h, roi_x:roi_x + roi_w], cv2.COLOR_RGB2GRAY)
                res = cv2.matchTemplate(roi, eye, cv2.TM_CCOEFF_NORMED)
                _, match, _, max_loc = cv2.minMaxLoc(res)
                cv2.rectangle(frame, (roi_x, roi_y), (roi_x + roi_w, roi_y + roi_h), (0, 0, 255), 2)
                if 0.01 < match < self.config_json["thresh"]:
                    cv2.rectangle(frame, (roi_x, roi_y), (roi_x + roi_w, roi_y + roi_h), 255, 2)
                else:
                    max_loc = (max_loc[0] + roi_x, max_loc[1] + roi_y)
                    bottom_right = (max_loc[0] + w, max_loc[1] + h)
                    cv2.rectangle(frame, max_loc, bottom_right, 255, 2)
                self.raw_screenshot = frame
                if self.config_json["display_percent"] != 100:
                    _, fw, fh = frame.shape[::-1]
                    frame = cv2.resize(frame, (round(fw * self.config_json["display_percent"] / 100), round(fh * self.config_json["display_percent"] / 100)))
                frame_tk = self.cv_image_to_tk(frame)
                self.monitor_tk_buffer = last_frame_tk
                self.monitor_display_buffer['image'] = self.monitor_tk_buffer
                self.monitor_tk = frame_tk
                self.monitor_display['image'] = self.monitor_tk
                last_frame_tk = frame_tk
        self.monitor_tk_buffer = None
        self.monitor_tk = None

    def after_task(self):
        self.config_json["view"] = [int(self.pos_x.get()), int(self.pos_y.get()), int(self.pos_w.get()), int(self.pos_h.get())]
        self.config_json["thresh"] = float(self.pos_th.get())
        self.config_json["WindowPrefix"] = self.prefix_input.get()
        self.config_json["white_delay"] = float(self.whi_del.get())
        self.config_json["advance_delay"] = int(self.adv_del.get())
        self.config_json["advance_delay_2"] = int(self.adv_del_2.get())
        self.config_json["npc"] = int(self.npc.get())
        self.config_json["pokemon_npc"] = int(self.pokemon_npc.get())
        self.config_json["timeline_npc"] = int(self.timeline_npc.get())
        self.config_json["MonitorWindow"] = bool(self.monitor_window_var.get())
        self.config_json["camera"] = int(self.camera_index.get())
        self.config_json["display_percent"] = int(self.display_percent.get())
        self.adv['text'] = self.advances
        self.cd['text'] = self.count_down
        self.after(100, self.after_task)

    def signal_handler(self, signal, frame):
        sys.exit(0)


root = tk.Tk()
app = Application(master=root)
app.mainloop()
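# --- Illustrative sketches, not part of player_blink_gui.py ---
# Two techniques the GUI relies on, reduced to standalone toys; every name
# below is hypothetical. 1) The timeline loops schedule future blink events on
# a heap of (deadline, kind) tuples so NPC blinks (kind 0) and pokemon blinks
# (kind 1) stay interleaved in time order:
def _demo_blink_timeline():
    import heapq
    import time
    queue = []
    now = time.perf_counter()
    for _ in range(2):                          # two NPC-style periodic events
        heapq.heappush(queue, (now + 1.017, 0))
    heapq.heappush(queue, (now + 3.5, 1))       # one pokemon-style event
    deadline, kind = heapq.heappop(queue)       # earliest deadline first
    return kind, max(deadline - time.perf_counter(), 0)

# 2) The preview locates the eye template inside a grayscale ROI with
# TM_CCOEFF_NORMED and compares the peak score against the threshold:
def _demo_template_match():
    import numpy as np
    rng = np.random.default_rng(0)
    roi = rng.integers(0, 50, size=(64, 64), dtype=np.uint8)   # noisy fake frame
    roi[20:28, 20:36] = np.tile(np.arange(16, dtype=np.uint8) * 10 + 60, (8, 1))
    eye = roi[20:28, 20:36].copy()                             # template cut from the frame
    res = cv2.matchTemplate(roi, eye, cv2.TM_CCOEFF_NORMED)
    _, score, _, max_loc = cv2.minMaxLoc(res)
    return score >= 0.9, max_loc                               # -> (True, (20, 20))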
test_double_spend.py
# Copyright © 2020 Interplanetary Database Association e.V.,
# Planetmint and IPDB software contributors.
# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
# Code is Apache-2.0 and docs are CC-BY-4.0

# # Double Spend testing
# This test challenges the system with double spends.

import os
from uuid import uuid4
from threading import Thread
import queue

import planetmint_driver.exceptions
from planetmint_driver import Planetmint
from planetmint_driver.crypto import generate_keypair


def test_double_create():
    bdb = Planetmint(os.environ.get('PLANETMINT_ENDPOINT'))
    alice = generate_keypair()
    results = queue.Queue()

    tx = bdb.transactions.fulfill(
        bdb.transactions.prepare(
            operation='CREATE',
            signers=alice.public_key,
            asset={'data': {'uuid': str(uuid4())}}),
        private_keys=alice.private_key)

    def send_and_queue(tx):
        try:
            bdb.transactions.send_commit(tx)
            results.put('OK')
        except planetmint_driver.exceptions.TransportError:
            results.put('FAIL')

    t1 = Thread(target=send_and_queue, args=(tx,))
    t2 = Thread(target=send_and_queue, args=(tx,))

    t1.start()
    t2.start()

    results = [results.get(timeout=2), results.get(timeout=2)]

    assert results.count('OK') == 1
    assert results.count('FAIL') == 1
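# --- Illustrative usage note, not part of the test above ---
# The endpoint is read from the environment, so a run against a local node
# (hypothetical URL) would look like:
#
#   PLANETMINT_ENDPOINT=http://localhost:9984 pytest test_double_spend.py
#
# Exactly one of the two concurrent send_commit calls is expected to be
# accepted; the other must be rejected as a double spend, which surfaces in
# the driver as a TransportError.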
tasks.py
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
from collections import OrderedDict, namedtuple
import errno
import functools
import importlib
import json
import logging
import os
import shutil
import stat
import tempfile
import time
import traceback
from distutils.dir_util import copy_tree
from distutils.version import LooseVersion as Version
import yaml
import fcntl
from pathlib import Path
from uuid import uuid4
import urllib.parse as urlparse
import socket
import threading
import concurrent.futures
from base64 import b64encode
import subprocess

# Django
from django.conf import settings
from django.db import transaction, DatabaseError, IntegrityError, ProgrammingError, connection
from django.db.models.fields.related import ForeignKey
from django.utils.timezone import now
from django.utils.encoding import smart_str
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _, gettext_noop
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django_guid.middleware import GuidMiddleware

# Django-CRUM
from crum import impersonate

# GitPython
import git
from gitdb.exc import BadName as BadGitName

# Runner
import ansible_runner

# Receptor
from receptorctl.socket_interface import ReceptorControl

# AWX
from awx import __version__ as awx_application_version
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS, STANDARD_INVENTORY_UPDATE_ENV
from awx.main.access import access_registry
from awx.main.redact import UriCleaner
from awx.main.models import (
    Schedule,
    TowerScheduleState,
    Instance,
    InstanceGroup,
    UnifiedJob,
    Notification,
    Inventory,
    InventorySource,
    SmartInventoryMembership,
    Job,
    AdHocCommand,
    ProjectUpdate,
    InventoryUpdate,
    SystemJob,
    JobEvent,
    ProjectUpdateEvent,
    InventoryUpdateEvent,
    AdHocCommandEvent,
    SystemJobEvent,
    build_safe_env,
)
from awx.main.constants import ACTIVE_STATES
from awx.main.exceptions import AwxTaskError, PostRunError
from awx.main.queue import CallbackQueueDispatcher
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_local_queuename, reaper
from awx.main.utils import (
    update_scm_url,
    ignore_inventory_computed_fields,
    ignore_inventory_group_removal,
    extract_ansible_vars,
    schedule_task_manager,
    get_awx_version,
    deepmerge,
    parse_yaml_or_json,
)
from awx.main.utils.execution_environments import get_default_execution_environment, get_default_pod_spec
from awx.main.utils.ansible import read_ansible_config
from awx.main.utils.external_logging import reconfigure_rsyslog
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
from awx.main.utils.reload import stop_local_services
from awx.main.utils.pglock import advisory_lock
from awx.main.utils.handlers import SpecialInventoryHandler
from awx.main.consumers import emit_channel_notification
from awx.main import analytics
from awx.conf import settings_registry
from awx.conf.license import get_license
from awx.main.analytics.subsystem_metrics import Metrics

from rest_framework.exceptions import PermissionDenied

__all__ = [
    'RunJob',
    'RunSystemJob',
    'RunProjectUpdate',
    'RunInventoryUpdate',
    'RunAdHocCommand',
    'handle_work_error',
    'handle_work_success',
    'apply_cluster_membership_policies',
    'update_inventory_computed_fields',
    'update_host_smart_inventory_memberships',
    'send_notifications',
    'purge_old_stdout_files',
]

HIDDEN_PASSWORD = '**********'

OPENSSH_KEY_ERROR = u'''\
It looks like you're trying to use a private key in OpenSSH format, which \
isn't supported by the installed version of OpenSSH on this instance. \
Try upgrading OpenSSH or providing your private key in a different format. \
'''

logger = logging.getLogger('awx.main.tasks')


class InvalidVirtualenvError(Exception):
    def __init__(self, message):
        self.message = message


def dispatch_startup():
    startup_logger = logging.getLogger('awx.main.tasks')
    startup_logger.debug("Syncing Schedules")
    for sch in Schedule.objects.all():
        try:
            sch.update_computed_fields()
        except Exception:
            logger.exception("Failed to rebuild schedule {}.".format(sch))

    #
    # When the dispatcher starts, if the instance cannot be found in the database,
    # automatically register it. This is mostly useful for openshift-based
    # deployments where:
    #
    # 2 Instances come online
    # Instance B encounters a network blip, Instance A notices, and
    # deprovisions it
    # Instance B's connectivity is restored, the dispatcher starts, and it
    # re-registers itself
    #
    # In traditional container-less deployments, instances don't get
    # deprovisioned when they miss their heartbeat, so this code is mostly a
    # no-op.
    #
    apply_cluster_membership_policies()
    cluster_node_heartbeat()
    Metrics().clear_values()

    # Update Tower's rsyslog.conf file based on logging settings in the db
    reconfigure_rsyslog()


def inform_cluster_of_shutdown():
    try:
        this_inst = Instance.objects.get(hostname=settings.CLUSTER_HOST_ID)
        this_inst.capacity = 0  # No thank you to new jobs while shut down
        this_inst.save(update_fields=['capacity', 'modified'])
        try:
            reaper.reap(this_inst)
        except Exception:
            logger.exception('failed to reap jobs for {}'.format(this_inst.hostname))
        logger.warning('Normal shutdown signal for instance {}, '
                       'removed self from capacity pool.'.format(this_inst.hostname))
    except Exception:
        logger.exception('Encountered problem with normal shutdown signal.')


@task(queue=get_local_queuename)
def apply_cluster_membership_policies():
    started_waiting = time.time()
    with advisory_lock('cluster_policy_lock', wait=True):
        lock_time = time.time() - started_waiting
        if lock_time > 1.0:
            to_log = logger.info
        else:
            to_log = logger.debug
        to_log('Waited {} seconds to obtain lock name: cluster_policy_lock'.format(lock_time))
        started_compute = time.time()
        all_instances = list(Instance.objects.order_by('id'))
        all_groups = list(InstanceGroup.objects.prefetch_related('instances'))

        total_instances = len(all_instances)
        actual_groups = []
        actual_instances = []
        Group = namedtuple('Group', ['obj', 'instances', 'prior_instances'])
        Node = namedtuple('Instance', ['obj', 'groups'])

        # Process policy instance list first, these will represent manually managed memberships
        instance_hostnames_map = {inst.hostname: inst for inst in all_instances}
        for ig in all_groups:
            group_actual = Group(obj=ig, instances=[], prior_instances=[instance.pk for instance in ig.instances.all()])  # obtained in prefetch
            for hostname in ig.policy_instance_list:
                if hostname not in instance_hostnames_map:
                    logger.info("Unknown instance {} in {} policy list".format(hostname, ig.name))
                    continue
                inst = instance_hostnames_map[hostname]
                group_actual.instances.append(inst.id)
                # NOTE: arguable behavior: policy-list-group is not added to
                # instance's group count for consideration in minimum-policy rules
            if group_actual.instances:
                logger.debug("Policy List, adding Instances {} to Group {}".format(group_actual.instances, ig.name))
            actual_groups.append(group_actual)

        # Process Instance minimum policies next, since it represents a concrete lower bound to the
        # number of instances to make available to instance groups
        actual_instances = [Node(obj=i, groups=[]) for i in all_instances if i.managed_by_policy]
        logger.debug("Total instances: {}, available for policy: {}".format(total_instances, len(actual_instances)))
        for g in sorted(actual_groups, key=lambda x: len(x.instances)):
            policy_min_added = []
            for i in sorted(actual_instances, key=lambda x: len(x.groups)):
                if len(g.instances) >= g.obj.policy_instance_minimum:
                    break
                if i.obj.id in g.instances:
                    # If the instance is already _in_ the group, it was
                    # applied earlier via the policy list
                    continue
                g.instances.append(i.obj.id)
                i.groups.append(g.obj.id)
                policy_min_added.append(i.obj.id)
            if policy_min_added:
                logger.debug("Policy minimum, adding Instances {} to Group {}".format(policy_min_added, g.obj.name))

        # Finally, process instance policy percentages
        for g in sorted(actual_groups, key=lambda x: len(x.instances)):
            policy_per_added = []
            for i in sorted(actual_instances, key=lambda x: len(x.groups)):
                if i.obj.id in g.instances:
                    # If the instance is already _in_ the group, it was
                    # applied earlier via a minimum policy or policy list
                    continue
                if 100 * float(len(g.instances)) / len(actual_instances) >= g.obj.policy_instance_percentage:
                    break
                g.instances.append(i.obj.id)
                i.groups.append(g.obj.id)
                policy_per_added.append(i.obj.id)
            if policy_per_added:
                logger.debug("Policy percentage, adding Instances {} to Group {}".format(policy_per_added, g.obj.name))

        # Determine if any changes need to be made
        needs_change = False
        for g in actual_groups:
            if set(g.instances) != set(g.prior_instances):
                needs_change = True
                break
        if not needs_change:
            logger.debug('Cluster policy no-op finished in {} seconds'.format(time.time() - started_compute))
            return

        # On a differential basis, apply instances to groups
        with transaction.atomic():
            for g in actual_groups:
                if g.obj.is_container_group:
                    logger.debug('Skipping containerized group {} for policy calculation'.format(g.obj.name))
                    continue
                instances_to_add = set(g.instances) - set(g.prior_instances)
                instances_to_remove = set(g.prior_instances) - set(g.instances)
                if instances_to_add:
                    logger.debug('Adding instances {} to group {}'.format(list(instances_to_add), g.obj.name))
                    g.obj.instances.add(*instances_to_add)
                if instances_to_remove:
                    logger.debug('Removing instances {} from group {}'.format(list(instances_to_remove), g.obj.name))
                    g.obj.instances.remove(*instances_to_remove)
        logger.debug('Cluster policy computation finished in {} seconds'.format(time.time() - started_compute))
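# --- Illustrative sketch, not part of the AWX task set ---
# The percentage pass above fills each group from least- to most-loaded
# instance until the group's share of all policy-managed instances reaches its
# configured percentage. The same fill rule on toy dicts (names hypothetical):
def _demo_fill_by_percentage():
    groups = [{"name": "default", "percentage": 50, "members": []}]
    instances = [{"id": i, "groups": []} for i in range(4)]
    for g in sorted(groups, key=lambda x: len(x["members"])):
        for inst in sorted(instances, key=lambda x: len(x["groups"])):
            if inst["id"] in g["members"]:
                continue
            if 100 * len(g["members"]) / len(instances) >= g["percentage"]:
                break
            g["members"].append(inst["id"])
            inst["groups"].append(g["name"])
    assert groups[0]["members"] == [0, 1]  # 50% of 4 instances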
@task(queue='tower_broadcast_all')
def handle_setting_changes(setting_keys):
    orig_len = len(setting_keys)
    for i in range(orig_len):
        for dependent_key in settings_registry.get_dependent_settings(setting_keys[i]):
            setting_keys.append(dependent_key)
    cache_keys = set(setting_keys)
    logger.debug('cache delete_many(%r)', cache_keys)
    cache.delete_many(cache_keys)

    if any([setting.startswith('LOG_AGGREGATOR') for setting in setting_keys]):
        reconfigure_rsyslog()


@task(queue='tower_broadcast_all')
def delete_project_files(project_path):
    # TODO: possibly implement some retry logic
    lock_file = project_path + '.lock'
    if os.path.exists(project_path):
        try:
            shutil.rmtree(project_path)
            logger.debug('Success removing project files {}'.format(project_path))
        except Exception:
            logger.exception('Could not remove project directory {}'.format(project_path))
    if os.path.exists(lock_file):
        try:
            os.remove(lock_file)
            logger.debug('Success removing {}'.format(lock_file))
        except Exception:
            logger.exception('Could not remove lock file {}'.format(lock_file))


@task(queue='tower_broadcast_all')
def profile_sql(threshold=1, minutes=1):
    if threshold <= 0:
        cache.delete('awx-profile-sql-threshold')
        logger.error('SQL PROFILING DISABLED')
    else:
        cache.set('awx-profile-sql-threshold', threshold, timeout=minutes * 60)
        logger.error('SQL QUERIES >={}s ENABLED FOR {} MINUTE(S)'.format(threshold, minutes))


@task(queue=get_local_queuename)
def send_notifications(notification_list, job_id=None):
    if not isinstance(notification_list, list):
        raise TypeError("notification_list should be of type list")
    if job_id is not None:
        job_actual = UnifiedJob.objects.get(id=job_id)

    notifications = Notification.objects.filter(id__in=notification_list)
    if job_id is not None:
        job_actual.notifications.add(*notifications)

    for notification in notifications:
        update_fields = ['status', 'notifications_sent']
        try:
            sent = notification.notification_template.send(notification.subject, notification.body)
            notification.status = "successful"
            notification.notifications_sent = sent
            if job_id is not None:
                job_actual.log_lifecycle("notifications_sent")
        except Exception as e:
            logger.exception("Send Notification Failed {}".format(e))
            notification.status = "failed"
            notification.error = smart_str(e)
            update_fields.append('error')
        finally:
            try:
                notification.save(update_fields=update_fields)
            except Exception:
                logger.exception('Error saving notification {} result.'.format(notification.id))


@task(queue=get_local_queuename)
def gather_analytics():
    from awx.conf.models import Setting
    from rest_framework.fields import DateTimeField

    last_gather = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_GATHER').first()
    last_time = DateTimeField().to_internal_value(last_gather.value) if last_gather and last_gather.value else None
    gather_time = now()

    if not last_time or ((gather_time - last_time).total_seconds() > settings.AUTOMATION_ANALYTICS_GATHER_INTERVAL):
        analytics.gather()


@task(queue=get_local_queuename)
def purge_old_stdout_files():
    nowtime = time.time()
    for f in os.listdir(settings.JOBOUTPUT_ROOT):
        if os.path.getctime(os.path.join(settings.JOBOUTPUT_ROOT, f)) < nowtime - settings.LOCAL_STDOUT_EXPIRE_TIME:
            os.unlink(os.path.join(settings.JOBOUTPUT_ROOT, f))
            logger.debug("Removing {}".format(os.path.join(settings.JOBOUTPUT_ROOT, f)))


@task(queue=get_local_queuename)
def cleanup_execution_environment_images():
    if settings.IS_K8S:
        return
    process = subprocess.run('podman images --filter="dangling=true" --format json'.split(" "), capture_output=True)
    if process.returncode != 0:
        logger.debug("Cleanup execution environment images: could not get list of images")
        return
    if len(process.stdout) > 0:
        images_system = json.loads(process.stdout)
        for e in images_system:
            image_name = e["Id"]
            logger.debug(f"Cleanup execution environment images: deleting {image_name}")
            process = subprocess.run(['podman', 'rmi', image_name, '-f'], stdout=subprocess.DEVNULL)
            if process.returncode != 0:
                logger.debug(f"Failed to delete image {image_name}")


@task(queue=get_local_queuename)
def cluster_node_heartbeat():
    logger.debug("Cluster node heartbeat task.")
    nowtime = now()
    instance_list = list(Instance.objects.all())
    this_inst = None
    lost_instances = []

    (changed, instance) = Instance.objects.get_or_register()
    if changed:
        logger.info("Registered tower node '{}'".format(instance.hostname))

    for inst in list(instance_list):
        if inst.hostname == settings.CLUSTER_HOST_ID:
            this_inst = inst
            instance_list.remove(inst)
        elif inst.is_lost(ref_time=nowtime):
            lost_instances.append(inst)
            instance_list.remove(inst)
    if this_inst:
        startup_event = this_inst.is_lost(ref_time=nowtime)
        this_inst.refresh_capacity()
        if startup_event:
            logger.warning('Rejoining the cluster as instance {}.'.format(this_inst.hostname))
            return
    else:
        raise RuntimeError("Cluster Host Not Found: {}".format(settings.CLUSTER_HOST_ID))
    # IFF any node has a greater version than we do, then we'll shutdown services
    for other_inst in instance_list:
        if other_inst.version == "":
            continue
        if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version.split('-', 1)[0]) and not settings.DEBUG:
            logger.error(
                "Host {} reports version {}, but this node {} is at {}, shutting down".format(
                    other_inst.hostname, other_inst.version, this_inst.hostname, this_inst.version
                )
            )
            # Shutdown signal will set the capacity to zero to ensure no Jobs get added to this instance.
            # The heartbeat task will reset the capacity to the system capacity after upgrade.
            stop_local_services(communicate=False)
            raise RuntimeError("Shutting down.")

    for other_inst in lost_instances:
        try:
            reaper.reap(other_inst)
        except Exception:
            logger.exception('failed to reap jobs for {}'.format(other_inst.hostname))
        try:
            # Capacity could already be 0 because:
            #  * It's a new node and it never had a heartbeat
            #  * It was set to 0 by another tower node running this method
            #  * It was set to 0 by this node, but auto deprovisioning is off
            #
            # If auto deprovisioning is on, don't bother setting the capacity to 0
            # since we will delete the node anyway.
            if other_inst.capacity != 0 and not settings.AWX_AUTO_DEPROVISION_INSTANCES:
                other_inst.capacity = 0
                other_inst.save(update_fields=['capacity'])
                logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.modified))
            elif settings.AWX_AUTO_DEPROVISION_INSTANCES:
                deprovision_hostname = other_inst.hostname
                other_inst.delete()
                logger.info("Host {} Automatically Deprovisioned.".format(deprovision_hostname))
        except DatabaseError as e:
            if 'did not affect any rows' in str(e):
                logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
            else:
                logger.exception('Error marking {} as lost'.format(other_inst.hostname))


@task(queue=get_local_queuename)
def awx_k8s_reaper():
    if not settings.RECEPTOR_RELEASE_WORK:
        return

    from awx.main.scheduler.kubernetes import PodManager  # prevent circular import

    for group in InstanceGroup.objects.filter(is_container_group=True).iterator():
        logger.debug("Checking for orphaned k8s pods for {}.".format(group))
        pods = PodManager.list_active_jobs(group)
        for job in UnifiedJob.objects.filter(pk__in=pods.keys()).exclude(status__in=ACTIVE_STATES):
            logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
            try:
                pm = PodManager(job)
                pm.kube_api.delete_namespaced_pod(name=pods[job.id], namespace=pm.namespace, _request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
            except Exception:
                logger.exception("Failed to delete orphaned pod {} from {}".format(job.log_format, group))
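# --- Illustrative sketch, not part of the AWX task set ---
# The heartbeat's version check above strips any "-suffix" (e.g. build
# metadata) before comparing, so a peer reporting "19.1.0-dev" still compares
# as newer than a local "19.0.0". Version is the LooseVersion alias imported
# at the top of this module:
def _demo_version_shutdown_check():
    peer, local = "19.1.0-dev", "19.0.0"
    assert Version(peer.split('-', 1)[0]) > Version(local.split('-', 1)[0])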
@task(queue=get_local_queuename)
def awx_periodic_scheduler():
    with advisory_lock('awx_periodic_scheduler_lock', wait=False) as acquired:
        if acquired is False:
            logger.debug("Not running periodic scheduler, another task holds lock")
            return
        logger.debug("Starting periodic scheduler")

        run_now = now()
        state = TowerScheduleState.get_solo()
        last_run = state.schedule_last_run
        logger.debug("Last scheduler run was: %s", last_run)
        state.schedule_last_run = run_now
        state.save()

        old_schedules = Schedule.objects.enabled().before(last_run)
        for schedule in old_schedules:
            schedule.update_computed_fields()
        schedules = Schedule.objects.enabled().between(last_run, run_now)

        invalid_license = False
        try:
            access_registry[Job](None).check_license(quiet=True)
        except PermissionDenied as e:
            invalid_license = e

        for schedule in schedules:
            template = schedule.unified_job_template
            schedule.update_computed_fields()  # To update next_run timestamp.
            if template.cache_timeout_blocked:
                logger.warn("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id))
                continue
            try:
                job_kwargs = schedule.get_job_kwargs()
                new_unified_job = schedule.unified_job_template.create_unified_job(**job_kwargs)
                logger.debug('Spawned {} from schedule {}-{}.'.format(new_unified_job.log_format, schedule.name, schedule.pk))

                if invalid_license:
                    new_unified_job.status = 'failed'
                    new_unified_job.job_explanation = str(invalid_license)
                    new_unified_job.save(update_fields=['status', 'job_explanation'])
                    new_unified_job.websocket_emit_status("failed")
                    raise invalid_license
                can_start = new_unified_job.signal_start()
            except Exception:
                logger.exception('Error spawning scheduled job.')
                continue
            if not can_start:
                new_unified_job.status = 'failed'
                new_unified_job.job_explanation = gettext_noop(
                    "Scheduled job could not start because it was not in the right state or required manual credentials"
                )
                new_unified_job.save(update_fields=['status', 'job_explanation'])
                new_unified_job.websocket_emit_status("failed")
            emit_channel_notification('schedules-changed', dict(id=schedule.id, group_name="schedules"))
        state.save()


@task(queue=get_local_queuename)
def handle_work_success(task_actual):
    try:
        instance = UnifiedJob.get_instance_by_type(task_actual['type'], task_actual['id'])
    except ObjectDoesNotExist:
        logger.warning('Missing {} `{}` in success callback.'.format(task_actual['type'], task_actual['id']))
        return
    if not instance:
        return

    schedule_task_manager()


@task(queue=get_local_queuename)
def handle_work_error(task_id, *args, **kwargs):
    subtasks = kwargs.get('subtasks', None)
    logger.debug('Executing error task id %s, subtasks: %s' % (task_id, str(subtasks)))
    first_instance = None
    first_instance_type = ''
    if subtasks is not None:
        for each_task in subtasks:
            try:
                instance = UnifiedJob.get_instance_by_type(each_task['type'], each_task['id'])
                if not instance:
                    # Unknown task type
                    logger.warn("Unknown task type: {}".format(each_task['type']))
                    continue
            except ObjectDoesNotExist:
                logger.warning('Missing {} `{}` in error callback.'.format(each_task['type'], each_task['id']))
                continue

            if first_instance is None:
                first_instance = instance
                first_instance_type = each_task['type']

            if instance.celery_task_id != task_id and not instance.cancel_flag and not instance.status == 'successful':
                instance.status = 'failed'
                instance.failed = True
                if not instance.job_explanation:
                    instance.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % (
                        first_instance_type,
                        first_instance.name,
                        first_instance.id,
                    )
                instance.save()
                instance.websocket_emit_status("failed")

    # We only send 1 job complete message since all the job completion message
    # handling does is trigger the scheduler. If we extend the functionality of
    # what the job complete message handler does then we may want to send a
    # completion event for each job here.
    if first_instance:
        schedule_task_manager()


@task(queue=get_local_queuename)
def handle_success_and_failure_notifications(job_id):
    uj = UnifiedJob.objects.get(pk=job_id)
    retries = 0
    while retries < 5:
        if uj.finished:
            uj.send_notification_templates('succeeded' if uj.status == 'successful' else 'failed')
            return
        else:
            # wait a few seconds to avoid a race where the
            # events are persisted _before_ the UJ.status
            # changes from running -> successful
            retries += 1
            time.sleep(1)
            uj = UnifiedJob.objects.get(pk=job_id)

    logger.warn(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.")


@task(queue=get_local_queuename)
def update_inventory_computed_fields(inventory_id):
    """
    Signal handler and wrapper around inventory.update_computed_fields to
    prevent unnecessary recursive calls.
    """
    i = Inventory.objects.filter(id=inventory_id)
    if not i.exists():
        logger.error("Update Inventory Computed Fields failed due to missing inventory: " + str(inventory_id))
        return
    i = i[0]
    try:
        i.update_computed_fields()
    except DatabaseError as e:
        if 'did not affect any rows' in str(e):
            logger.debug('Exiting duplicate update_inventory_computed_fields task.')
            return
        raise


def update_smart_memberships_for_inventory(smart_inventory):
    current = set(SmartInventoryMembership.objects.filter(inventory=smart_inventory).values_list('host_id', flat=True))
    new = set(smart_inventory.hosts.values_list('id', flat=True))
    additions = new - current
    removals = current - new
    if additions or removals:
        with transaction.atomic():
            if removals:
                SmartInventoryMembership.objects.filter(inventory=smart_inventory, host_id__in=removals).delete()
            if additions:
                add_for_inventory = [SmartInventoryMembership(inventory_id=smart_inventory.id, host_id=host_id) for host_id in additions]
                SmartInventoryMembership.objects.bulk_create(add_for_inventory, ignore_conflicts=True)
        logger.debug(
            'Smart host membership cached for {}, {} additions, {} removals, {} total count.'.format(
                smart_inventory.pk, len(additions), len(removals), len(new)
            )
        )
        return True  # changed
    return False


@task(queue=get_local_queuename)
def update_host_smart_inventory_memberships():
    smart_inventories = Inventory.objects.filter(kind='smart', host_filter__isnull=False, pending_deletion=False)
    changed_inventories = set([])
    for smart_inventory in smart_inventories:
        try:
            changed = update_smart_memberships_for_inventory(smart_inventory)
            if changed:
                changed_inventories.add(smart_inventory)
        except IntegrityError:
            logger.exception('Failed to update smart inventory memberships for {}'.format(smart_inventory.pk))
    # Update computed fields for changed inventories outside atomic action
    for smart_inventory in changed_inventories:
        smart_inventory.update_computed_fields()
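# --- Illustrative sketch, not part of the AWX task set ---
# update_smart_memberships_for_inventory reconciles the membership cache with
# two plain set differences; the same idea on toy data:
def _demo_membership_diff():
    current = {1, 2, 3}   # host ids currently cached
    new = {2, 3, 4}       # host ids matching the smart inventory filter now
    assert new - current == {4}   # rows to bulk-create
    assert current - new == {1}   # rows to delete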
@task(queue=get_local_queuename)
def migrate_legacy_event_data(tblname):
    #
    # NOTE: this function is not actually in use anymore,
    # but has been intentionally kept for historical purposes,
    # and to serve as an illustration if we ever need to perform
    # bulk modification/migration of event data in the future.
    #
    if 'event' not in tblname:
        return
    with advisory_lock(f'bigint_migration_{tblname}', wait=False) as acquired:
        if acquired is False:
            return
        chunk = settings.JOB_EVENT_MIGRATION_CHUNK_SIZE

        def _remaining():
            try:
                cursor.execute(f'SELECT MAX(id) FROM _old_{tblname};')
                return cursor.fetchone()[0]
            except ProgrammingError:
                # the table is gone (migration is unnecessary)
                return None

        with connection.cursor() as cursor:
            total_rows = _remaining()
            while total_rows:
                with transaction.atomic():
                    cursor.execute(f'INSERT INTO {tblname} SELECT * FROM _old_{tblname} ORDER BY id DESC LIMIT {chunk} RETURNING id;')
                    last_insert_pk = cursor.fetchone()
                    if last_insert_pk is None:
                        # this means that the SELECT from the old table was
                        # empty, and there was nothing to insert (so we're done)
                        break
                    last_insert_pk = last_insert_pk[0]
                    cursor.execute(f'DELETE FROM _old_{tblname} WHERE id IN (SELECT id FROM _old_{tblname} ORDER BY id DESC LIMIT {chunk});')
                logger.warn(f'migrated int -> bigint rows to {tblname} from _old_{tblname}; # ({last_insert_pk} rows remaining)')

            if _remaining() is None:
                cursor.execute(f'DROP TABLE IF EXISTS _old_{tblname}')
                logger.warn(f'{tblname} primary key migration to bigint has finished')


@task(queue=get_local_queuename)
def delete_inventory(inventory_id, user_id, retries=5):
    # Delete inventory as user
    if user_id is None:
        user = None
    else:
        try:
            user = User.objects.get(id=user_id)
        except Exception:
            user = None
    with ignore_inventory_computed_fields(), ignore_inventory_group_removal(), impersonate(user):
        try:
            i = Inventory.objects.get(id=inventory_id)
            for host in i.hosts.iterator():
                host.job_events_as_primary_host.update(host=None)
            i.delete()
            emit_channel_notification('inventories-status_changed', {'group_name': 'inventories', 'inventory_id': inventory_id, 'status': 'deleted'})
            logger.debug('Deleted inventory {} as user {}.'.format(inventory_id, user_id))
        except Inventory.DoesNotExist:
            logger.exception("Delete Inventory failed due to missing inventory: " + str(inventory_id))
            return
        except DatabaseError:
            logger.exception('Database error deleting inventory {}, but will retry.'.format(inventory_id))
            if retries > 0:
                time.sleep(10)
                delete_inventory(inventory_id, user_id, retries=retries - 1)


def with_path_cleanup(f):
    @functools.wraps(f)
    def _wrapped(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        finally:
            for p in self.cleanup_paths:
                try:
                    if os.path.isdir(p):
                        shutil.rmtree(p, ignore_errors=True)
                    elif os.path.exists(p):
                        os.remove(p)
                except OSError:
                    logger.exception("Failed to remove tmp file: {}".format(p))
            self.cleanup_paths = []

    return _wrapped


class BaseTask(object):
    model = None
    event_model = None
    abstract = True

    def __init__(self):
        self.cleanup_paths = []
        self.parent_workflow_job_id = None
        self.host_map = {}
        self.guid = GuidMiddleware.get_guid()

    def update_model(self, pk, _attempt=0, **updates):
        """Reload the model instance from the database and update the
        given fields.
        """
        try:
            with transaction.atomic():
                # Retrieve the model instance.
                instance = self.model.objects.get(pk=pk)

                # Update the appropriate fields and save the model
                # instance, then return the new instance.
                if updates:
                    update_fields = ['modified']
                    for field, value in updates.items():
                        setattr(instance, field, value)
                        update_fields.append(field)
                        if field == 'status':
                            update_fields.append('failed')
                    instance.save(update_fields=update_fields)
                return instance
        except DatabaseError as e:
            # Log out the error to the debug logger.
            logger.debug('Database error updating %s, retrying in 5 seconds (retry #%d): %s', self.model._meta.object_name, _attempt + 1, e)

            # Attempt to retry the update, assuming we haven't already
            # tried too many times.
            if _attempt < 5:
                time.sleep(5)
                return self.update_model(pk, _attempt=_attempt + 1, **updates)
            else:
                logger.error('Failed to update %s after %d retries.', self.model._meta.object_name, _attempt)

    def get_path_to(self, *args):
        """
        Return absolute path relative to this file.
        """
        return os.path.abspath(os.path.join(os.path.dirname(__file__), *args))

    def build_execution_environment_params(self, instance, private_data_dir):
        if settings.IS_K8S:
            return {}

        image = instance.execution_environment.image
        params = {
            "container_image": image,
            "process_isolation": True,
            "container_options": ['--user=root'],
        }

        if instance.execution_environment.credential:
            cred = instance.execution_environment.credential
            if cred.has_inputs(field_names=('host', 'username', 'password')):
                path = os.path.split(private_data_dir)[0]
                with open(path + '/auth.json', 'w') as authfile:
                    os.chmod(authfile.name, stat.S_IRUSR | stat.S_IWUSR)
                    host = cred.get_input('host')
                    username = cred.get_input('username')
                    password = cred.get_input('password')
                    token = "{}:{}".format(username, password)
                    auth_data = {'auths': {host: {'auth': b64encode(token.encode('ascii')).decode()}}}
                    authfile.write(json.dumps(auth_data, indent=4))
                params["container_options"].append(f'--authfile={authfile.name}')
            else:
                raise RuntimeError('Please recheck that your host, username, and password fields are all filled.')

        pull = instance.execution_environment.pull
        if pull:
            params['container_options'].append(f'--pull={pull}')

        if settings.AWX_ISOLATION_SHOW_PATHS:
            params['container_volume_mounts'] = []
            for this_path in settings.AWX_ISOLATION_SHOW_PATHS:
                params['container_volume_mounts'].append(f'{this_path}:{this_path}:Z')
        return params

    def build_private_data(self, instance, private_data_dir):
        """
        Return SSH private key data (only if stored in DB as ssh_key_data).
        Return structure is a dict of the form:
        """

    def build_private_data_dir(self, instance):
        """
        Create a temporary directory for job-related files.
        """
        pdd_wrapper_path = tempfile.mkdtemp(prefix=f'pdd_wrapper_{instance.pk}_', dir=settings.AWX_ISOLATION_BASE_PATH)
        os.chmod(pdd_wrapper_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        if settings.AWX_CLEANUP_PATHS:
            self.cleanup_paths.append(pdd_wrapper_path)

        path = tempfile.mkdtemp(prefix='awx_%s_' % instance.pk, dir=pdd_wrapper_path)
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

        runner_project_folder = os.path.join(path, 'project')
        if not os.path.exists(runner_project_folder):
            # Ansible Runner requires that this directory exists.
            # Specifically, when using process isolation
            os.mkdir(runner_project_folder)
        return path

    def build_private_data_files(self, instance, private_data_dir):
        """
        Creates temporary files containing the private data.
        Returns a dictionary i.e.,

        {
            'credentials': {
                <awx.main.models.Credential>: '/path/to/decrypted/data',
                <awx.main.models.Credential>: '/path/to/decrypted/data',
                ...
            },
            'certificates': {
                <awx.main.models.Credential>: /path/to/signed/ssh/certificate,
                <awx.main.models.Credential>: /path/to/signed/ssh/certificate,
                ...
            }
        }
        """
        private_data = self.build_private_data(instance, private_data_dir)
        private_data_files = {'credentials': {}}
        if private_data is not None:
            for credential, data in private_data.get('credentials', {}).items():
                # OpenSSH formatted keys must have a trailing newline to be
                # accepted by ssh-add.
                if 'OPENSSH PRIVATE KEY' in data and not data.endswith('\n'):
                    data += '\n'
                # For credentials used with ssh-add, write to a named pipe which
                # will be read then closed, instead of leaving the SSH key on disk.
                if credential and credential.credential_type.namespace in ('ssh', 'scm'):
                    try:
                        os.mkdir(os.path.join(private_data_dir, 'env'))
                    except OSError as e:
                        if e.errno != errno.EEXIST:
                            raise
                    path = os.path.join(private_data_dir, 'env', 'ssh_key')
                    ansible_runner.utils.open_fifo_write(path, data.encode())
                    private_data_files['credentials']['ssh'] = path
                # Ansible network modules do not yet support ssh-agent.
                # Instead, ssh private key file is explicitly passed via an
                # env variable.
                else:
                    handle, path = tempfile.mkstemp(dir=private_data_dir)
                    f = os.fdopen(handle, 'w')
                    f.write(data)
                    f.close()
                    os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
                    private_data_files['credentials'][credential] = path
            for credential, data in private_data.get('certificates', {}).items():
                artifact_dir = os.path.join(private_data_dir, 'artifacts', str(self.instance.id))
                if not os.path.exists(artifact_dir):
                    os.makedirs(artifact_dir, mode=0o700)
                path = os.path.join(artifact_dir, 'ssh_key_data-cert.pub')
                with open(path, 'w') as f:
                    f.write(data)
                os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        return private_data_files

    def build_passwords(self, instance, runtime_passwords):
        """
        Build a dictionary of passwords for responding to prompts.
        """
        return {
            'yes': 'yes',
            'no': 'no',
            '': '',
        }

    def build_extra_vars_file(self, instance, private_data_dir):
        """
        Build ansible yaml file filled with extra vars to be passed via -e@file.yml
        """

    def _write_extra_vars_file(self, private_data_dir, vars, safe_dict={}):
        env_path = os.path.join(private_data_dir, 'env')
        try:
            os.mkdir(env_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        path = os.path.join(env_path, 'extravars')
        handle = os.open(path, os.O_RDWR | os.O_CREAT, stat.S_IREAD | stat.S_IWRITE)
        f = os.fdopen(handle, 'w')
        if settings.ALLOW_JINJA_IN_EXTRA_VARS == 'always':
            f.write(yaml.safe_dump(vars))
        else:
            f.write(safe_dump(vars, safe_dict))
        f.close()
        os.chmod(path, stat.S_IRUSR)
        return path

    def add_awx_venv(self, env):
        env['VIRTUAL_ENV'] = settings.AWX_VENV_PATH
        if 'PATH' in env:
            env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin") + ":" + env['PATH']
        else:
            env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin")
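    # --- Illustrative note, not part of BaseTask ---
    # _write_extra_vars_file above produces the ansible-runner input layout;
    # a hypothetical call such as
    #
    #   self._write_extra_vars_file('/tmp/awx_42_x', {'foo': 'bar'})
    #
    # writes {private_data_dir}/env/extravars containing "foo: bar" and then
    # chmods it owner-read-only. safe_dump() is used unless
    # ALLOW_JINJA_IN_EXTRA_VARS == 'always', which keeps untrusted extra vars
    # from being rendered as Jinja templates by Ansible.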
    def build_env(self, instance, private_data_dir, private_data_files=None):
        """
        Build environment dictionary for ansible-playbook.
        """
        env = {}
        # Add ANSIBLE_* settings to the subprocess environment.
        for attr in dir(settings):
            if attr == attr.upper() and attr.startswith('ANSIBLE_'):
                env[attr] = str(getattr(settings, attr))
        # Also set environment variables configured in AWX_TASK_ENV setting.
        for key, value in settings.AWX_TASK_ENV.items():
            env[key] = str(value)

        env['AWX_PRIVATE_DATA_DIR'] = private_data_dir

        ee_cred = self.instance.execution_environment.credential
        if ee_cred:
            verify_ssl = ee_cred.get_input('verify_ssl')
            if not verify_ssl:
                pdd_wrapper_path = os.path.split(private_data_dir)[0]
                registries_conf_path = os.path.join(pdd_wrapper_path, 'registries.conf')
                host = ee_cred.get_input('host')

                with open(registries_conf_path, 'w') as registries_conf:
                    os.chmod(registries_conf.name, stat.S_IRUSR | stat.S_IWUSR)
                    lines = [
                        '[[registry]]',
                        'location = "{}"'.format(host),
                        'insecure = true',
                    ]
                    registries_conf.write('\n'.join(lines))

                # Podman >= 3.1.0
                env['CONTAINERS_REGISTRIES_CONF'] = registries_conf_path
                # Podman < 3.1.0
                env['REGISTRIES_CONFIG_PATH'] = registries_conf_path
        return env

    def build_inventory(self, instance, private_data_dir):
        script_params = dict(hostvars=True, towervars=True)
        if hasattr(instance, 'job_slice_number'):
            script_params['slice_number'] = instance.job_slice_number
            script_params['slice_count'] = instance.job_slice_count
        script_data = instance.inventory.get_script_data(**script_params)
        # maintain a list of host_name --> host_id
        # so we can associate emitted events to Host objects
        self.host_map = {hostname: hv.pop('remote_tower_id', '') for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items()}
        json_data = json.dumps(script_data)
        path = os.path.join(private_data_dir, 'inventory')
        os.makedirs(path, mode=0o700)
        fn = os.path.join(path, 'hosts')
        with open(fn, 'w') as f:
            os.chmod(fn, stat.S_IRUSR | stat.S_IXUSR | stat.S_IWUSR)
            f.write('#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json_data)
        return fn

    def build_args(self, instance, private_data_dir, passwords):
        raise NotImplementedError

    def write_args_file(self, private_data_dir, args):
        env_path = os.path.join(private_data_dir, 'env')
        try:
            os.mkdir(env_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        path = os.path.join(env_path, 'cmdline')
        handle = os.open(path, os.O_RDWR | os.O_CREAT, stat.S_IREAD | stat.S_IWRITE)
        f = os.fdopen(handle, 'w')
        f.write(ansible_runner.utils.args2cmdline(*args))
        f.close()
        os.chmod(path, stat.S_IRUSR)
        return path

    def build_cwd(self, instance, private_data_dir):
        raise NotImplementedError

    def build_credentials_list(self, instance):
        return []

    def get_instance_timeout(self, instance):
        global_timeout_setting_name = instance._global_timeout_setting()
        if global_timeout_setting_name:
            global_timeout = getattr(settings, global_timeout_setting_name, 0)
            local_timeout = getattr(instance, 'timeout', 0)
            job_timeout = global_timeout if local_timeout == 0 else local_timeout
            job_timeout = 0 if local_timeout < 0 else job_timeout
        else:
            job_timeout = 0
        return job_timeout

    def get_password_prompts(self, passwords={}):
        """
        Return a dictionary where keys are strings or regular expressions for
        prompts, and values are password lookup keys (keys that are returned
        from build_passwords).
        """
        return OrderedDict()

    def create_expect_passwords_data_struct(self, password_prompts, passwords):
        expect_passwords = {}
        for k, v in password_prompts.items():
            expect_passwords[k] = passwords.get(v, '') or ''
        return expect_passwords

    def pre_run_hook(self, instance, private_data_dir):
        """
        Hook for any steps to run before the job/task starts
        """
        instance.log_lifecycle("pre_run")

    def post_run_hook(self, instance, status):
        """
        Hook for any steps to run before job/task is marked as complete.
        """
        instance.log_lifecycle("post_run")

    def final_run_hook(self, instance, status, private_data_dir, fact_modification_times):
        """
        Hook for any steps to run after job/task is marked as complete.
        """
        instance.log_lifecycle("finalize_run")
        job_profiling_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling')
        awx_profiling_dir = '/var/log/tower/playbook_profiling/'
        collections_info = os.path.join(private_data_dir, 'artifacts/', 'collections.json')
        ansible_version_file = os.path.join(private_data_dir, 'artifacts/', 'ansible_version.txt')

        if not os.path.exists(awx_profiling_dir):
            os.mkdir(awx_profiling_dir)
        if os.path.isdir(job_profiling_dir):
            shutil.copytree(job_profiling_dir, os.path.join(awx_profiling_dir, str(instance.pk)))
        if os.path.exists(collections_info):
            with open(collections_info) as ee_json_info:
                ee_collections_info = json.loads(ee_json_info.read())
                instance.installed_collections = ee_collections_info
                instance.save(update_fields=['installed_collections'])
        if os.path.exists(ansible_version_file):
            with open(ansible_version_file) as ee_ansible_info:
                ansible_version_info = ee_ansible_info.readline()
                instance.ansible_version = ansible_version_info
                instance.save(update_fields=['ansible_version'])
""" # cache end_line locally for RunInventoryUpdate tasks # which generate job events from two 'streams': # ansible-inventory and the awx.main.commands.inventory_import # logger if isinstance(self, RunInventoryUpdate): self.end_line = event_data['end_line'] if event_data.get(self.event_data_key, None): if self.event_data_key != 'job_id': event_data.pop('parent_uuid', None) if self.parent_workflow_job_id: event_data['workflow_job_id'] = self.parent_workflow_job_id if self.host_map: host = event_data.get('event_data', {}).get('host', '').strip() if host: event_data['host_name'] = host if host in self.host_map: event_data['host_id'] = self.host_map[host] else: event_data['host_name'] = '' event_data['host_id'] = '' if event_data.get('event') == 'playbook_on_stats': event_data['host_map'] = self.host_map if isinstance(self, RunProjectUpdate): # it's common for Ansible's SCM modules to print # error messages on failure that contain the plaintext # basic auth credentials (username + password) # it's also common for the nested event data itself (['res']['...']) # to contain unredacted text on failure # this is a _little_ expensive to filter # with regex, but project updates don't have many events, # so it *should* have a negligible performance impact task = event_data.get('event_data', {}).get('task_action') try: if task in ('git', 'svn'): event_data_json = json.dumps(event_data) event_data_json = UriCleaner.remove_sensitive(event_data_json) event_data = json.loads(event_data_json) except json.JSONDecodeError: pass if 'event_data' in event_data: event_data['event_data']['guid'] = self.guid event_data.setdefault(self.event_data_key, self.instance.id) self.dispatcher.dispatch(event_data) self.event_ct += 1 ''' Handle artifacts ''' if event_data.get('event_data', {}).get('artifact_data', {}): self.instance.artifacts = event_data['event_data']['artifact_data'] self.instance.save(update_fields=['artifacts']) return False def cancel_callback(self): """ Ansible runner callback to tell the job when/if it is canceled """ unified_job_id = self.instance.pk self.instance = self.update_model(unified_job_id) if not self.instance: logger.error('unified job {} was deleted while running, canceling'.format(unified_job_id)) return True if self.instance.cancel_flag or self.instance.status == 'canceled': cancel_wait = (now() - self.instance.modified).seconds if self.instance.modified else 0 if cancel_wait > 5: logger.warn('Request to cancel {} took {} seconds to complete.'.format(self.instance.log_format, cancel_wait)) return True return False def finished_callback(self, runner_obj): """ Ansible runner callback triggered on finished run """ event_data = { 'event': 'EOF', 'final_counter': self.event_ct, 'guid': self.guid, } event_data.setdefault(self.event_data_key, self.instance.id) self.dispatcher.dispatch(event_data) def status_handler(self, status_data, runner_config): """ Ansible runner callback triggered on status transition """ if status_data['status'] == 'starting': job_env = dict(runner_config.env) ''' Take the safe environment variables and overwrite ''' for k, v in self.safe_env.items(): if k in job_env: job_env[k] = v self.instance = self.update_model(self.instance.pk, job_args=json.dumps(runner_config.command), job_cwd=runner_config.cwd, job_env=job_env) elif status_data['status'] == 'error': result_traceback = status_data.get('result_traceback', None) if result_traceback: self.instance = self.update_model(self.instance.pk, result_traceback=result_traceback) @with_path_cleanup def run(self, pk, **kwargs): 
""" Run the job/task and capture its output. """ self.instance = self.model.objects.get(pk=pk) if self.instance.execution_environment_id is None: from awx.main.signals import disable_activity_stream with disable_activity_stream(): self.instance = self.update_model(self.instance.pk, execution_environment=self.instance.resolve_execution_environment()) # self.instance because of the update_model pattern and when it's used in callback handlers self.instance = self.update_model(pk, status='running', start_args='') # blank field to remove encrypted passwords self.instance.websocket_emit_status("running") status, rc = 'error', None extra_update_fields = {} fact_modification_times = {} self.event_ct = 0 ''' Needs to be an object property because status_handler uses it in a callback context ''' self.safe_env = {} self.safe_cred_env = {} private_data_dir = None # store a reference to the parent workflow job (if any) so we can include # it in event data JSON if self.instance.spawned_by_workflow: self.parent_workflow_job_id = self.instance.get_workflow_job().id try: self.instance.send_notification_templates("running") private_data_dir = self.build_private_data_dir(self.instance) self.pre_run_hook(self.instance, private_data_dir) self.instance.log_lifecycle("preparing_playbook") if self.instance.cancel_flag: self.instance = self.update_model(self.instance.pk, status='canceled') if self.instance.status != 'running': # Stop the task chain and prevent starting the job if it has # already been canceled. self.instance = self.update_model(pk) status = self.instance.status raise RuntimeError('not starting %s task' % self.instance.status) if not os.path.exists(settings.AWX_ISOLATION_BASE_PATH): raise RuntimeError('AWX_ISOLATION_BASE_PATH=%s does not exist' % settings.AWX_ISOLATION_BASE_PATH) # store a record of the venv used at runtime if hasattr(self.instance, 'custom_virtualenv'): self.update_model(pk, custom_virtualenv=getattr(self.instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH)) # Fetch "cached" fact data from prior runs and put on the disk # where ansible expects to find it if getattr(self.instance, 'use_fact_cache', False): self.instance.start_job_fact_cache( os.path.join(private_data_dir, 'artifacts', str(self.instance.id), 'fact_cache'), fact_modification_times, ) # May have to serialize the value private_data_files = self.build_private_data_files(self.instance, private_data_dir) passwords = self.build_passwords(self.instance, kwargs) self.build_extra_vars_file(self.instance, private_data_dir) args = self.build_args(self.instance, private_data_dir, passwords) env = self.build_env(self.instance, private_data_dir, private_data_files=private_data_files) self.safe_env = build_safe_env(env) credentials = self.build_credentials_list(self.instance) for credential in credentials: if credential: credential.credential_type.inject_credential(credential, env, self.safe_cred_env, args, private_data_dir) self.safe_env.update(self.safe_cred_env) self.write_args_file(private_data_dir, args) password_prompts = self.get_password_prompts(passwords) expect_passwords = self.create_expect_passwords_data_struct(password_prompts, passwords) params = { 'ident': self.instance.id, 'private_data_dir': private_data_dir, 'playbook': self.build_playbook_path_relative_to_cwd(self.instance, private_data_dir), 'inventory': self.build_inventory(self.instance, private_data_dir), 'passwords': expect_passwords, 'envvars': env, 'settings': { 'job_timeout': self.get_instance_timeout(self.instance), 'suppress_ansible_output': 
True, }, } if isinstance(self.instance, AdHocCommand): params['module'] = self.build_module_name(self.instance) params['module_args'] = self.build_module_args(self.instance) if getattr(self.instance, 'use_fact_cache', False): # Enable Ansible fact cache. params['fact_cache_type'] = 'jsonfile' else: # Disable Ansible fact cache. params['fact_cache_type'] = '' if self.instance.is_container_group_task or settings.IS_K8S: params['envvars'].pop('HOME', None) ''' Delete parameters if the values are None or empty array ''' for v in ['passwords', 'playbook', 'inventory']: if not params[v]: del params[v] self.dispatcher = CallbackQueueDispatcher() self.instance.log_lifecycle("running_playbook") if isinstance(self.instance, SystemJob): cwd = self.build_cwd(self.instance, private_data_dir) res = ansible_runner.interface.run( project_dir=cwd, event_handler=self.event_handler, finished_callback=self.finished_callback, status_handler=self.status_handler, **params ) else: receptor_job = AWXReceptorJob(self, params) self.unit_id = receptor_job.unit_id res = receptor_job.run() if not res: return status = res.status rc = res.rc if status == 'timeout': self.instance.job_explanation = "Job terminated due to timeout" status = 'failed' extra_update_fields['job_explanation'] = self.instance.job_explanation # ensure failure notification sends even if playbook_on_stats event is not triggered handle_success_and_failure_notifications.apply_async([self.instance.job.id]) except InvalidVirtualenvError as e: extra_update_fields['job_explanation'] = e.message logger.error('{} {}'.format(self.instance.log_format, e.message)) except Exception: # this could catch programming or file system errors extra_update_fields['result_traceback'] = traceback.format_exc() logger.exception('%s Exception occurred while running task', self.instance.log_format) finally: logger.debug('%s finished running, producing %s events.', self.instance.log_format, self.event_ct) try: self.post_run_hook(self.instance, status) except PostRunError as exc: if status == 'successful': status = exc.status extra_update_fields['job_explanation'] = exc.args[0] if exc.tb: extra_update_fields['result_traceback'] = exc.tb except Exception: logger.exception('{} Post run hook errored.'.format(self.instance.log_format)) self.instance = self.update_model(pk) self.instance = self.update_model(pk, status=status, emitted_events=self.event_ct, **extra_update_fields) try: self.final_run_hook(self.instance, status, private_data_dir, fact_modification_times) except Exception: logger.exception('{} Final run hook errored.'.format(self.instance.log_format)) self.instance.websocket_emit_status(status) if status != 'successful': if status == 'canceled': raise AwxTaskError.TaskCancel(self.instance, rc) else: raise AwxTaskError.TaskError(self.instance, rc) @task(queue=get_local_queuename) class RunJob(BaseTask): """ Run a job using ansible-playbook. """ model = Job event_model = JobEvent event_data_key = 'job_id' def build_private_data(self, job, private_data_dir): """ Returns a dict of the form { 'credentials': { <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>, <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>, ... }, 'certificates': { <awx.main.models.Credential>: <signed SSH certificate data>, <awx.main.models.Credential>: <signed SSH certificate data>, ... 
} } """ private_data = {'credentials': {}} for credential in job.credentials.prefetch_related('input_sources__source_credential').all(): # If we were sent SSH credentials, decrypt them and send them # back (they will be written to a temporary file). if credential.has_input('ssh_key_data'): private_data['credentials'][credential] = credential.get_input('ssh_key_data', default='') if credential.has_input('ssh_public_key_data'): private_data.setdefault('certificates', {})[credential] = credential.get_input('ssh_public_key_data', default='') return private_data def build_passwords(self, job, runtime_passwords): """ Build a dictionary of passwords for SSH private key, SSH user, sudo/su and ansible-vault. """ passwords = super(RunJob, self).build_passwords(job, runtime_passwords) cred = job.machine_credential if cred: for field in ('ssh_key_unlock', 'ssh_password', 'become_password', 'vault_password'): value = runtime_passwords.get(field, cred.get_input('password' if field == 'ssh_password' else field, default='')) if value not in ('', 'ASK'): passwords[field] = value for cred in job.vault_credentials: field = 'vault_password' vault_id = cred.get_input('vault_id', default=None) if vault_id: field = 'vault_password.{}'.format(vault_id) if field in passwords: raise RuntimeError('multiple vault credentials were specified with --vault-id {}@prompt'.format(vault_id)) value = runtime_passwords.get(field, cred.get_input('vault_password', default='')) if value not in ('', 'ASK'): passwords[field] = value ''' Only 1 value can be provided for a unique prompt string. Prefer ssh key unlock over network key unlock. ''' if 'ssh_key_unlock' not in passwords: for cred in job.network_credentials: if cred.inputs.get('ssh_key_unlock'): passwords['ssh_key_unlock'] = runtime_passwords.get('ssh_key_unlock', cred.get_input('ssh_key_unlock', default='')) break return passwords def build_env(self, job, private_data_dir, private_data_files=None): """ Build environment dictionary for ansible-playbook. """ env = super(RunJob, self).build_env(job, private_data_dir, private_data_files=private_data_files) if private_data_files is None: private_data_files = {} # Set environment variables needed for inventory and job event # callbacks to work. env['JOB_ID'] = str(job.pk) env['INVENTORY_ID'] = str(job.inventory.pk) if job.project: env['PROJECT_REVISION'] = job.project.scm_revision env['ANSIBLE_RETRY_FILES_ENABLED'] = "False" env['MAX_EVENT_RES'] = str(settings.MAX_EVENT_RES_DATA) if hasattr(settings, 'AWX_ANSIBLE_CALLBACK_PLUGINS') and settings.AWX_ANSIBLE_CALLBACK_PLUGINS: env['ANSIBLE_CALLBACK_PLUGINS'] = ':'.join(settings.AWX_ANSIBLE_CALLBACK_PLUGINS) env['AWX_HOST'] = settings.TOWER_URL_BASE # Create a directory for ControlPath sockets that is unique to each job cp_dir = os.path.join(private_data_dir, 'cp') if not os.path.exists(cp_dir): os.mkdir(cp_dir, 0o700) # FIXME: more elegant way to manage this path in container env['ANSIBLE_SSH_CONTROL_PATH_DIR'] = '/runner/cp' # Set environment variables for cloud credentials. 
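        # Sketch of the OpenStack case handled below, assuming one openstack
        # credential whose decrypted config file was written into the private
        # data dir (file name illustrative): only the basename survives
        # because the directory is bind-mounted into the execution
        # environment at /runner, e.g.
        #   cred_files = {<Credential>: '/tmp/pdd_xyz/openstack_cred_abc'}
        #   -> env['OS_CLIENT_CONFIG_FILE'] == '/runner/openstack_cred_abc'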
cred_files = private_data_files.get('credentials', {}) for cloud_cred in job.cloud_credentials: if cloud_cred and cloud_cred.credential_type.namespace == 'openstack': env['OS_CLIENT_CONFIG_FILE'] = os.path.join('/runner', os.path.basename(cred_files.get(cloud_cred, ''))) for network_cred in job.network_credentials: env['ANSIBLE_NET_USERNAME'] = network_cred.get_input('username', default='') env['ANSIBLE_NET_PASSWORD'] = network_cred.get_input('password', default='') ssh_keyfile = cred_files.get(network_cred, '') if ssh_keyfile: env['ANSIBLE_NET_SSH_KEYFILE'] = ssh_keyfile authorize = network_cred.get_input('authorize', default=False) env['ANSIBLE_NET_AUTHORIZE'] = str(int(authorize)) if authorize: env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='') path_vars = ( ('ANSIBLE_COLLECTIONS_PATHS', 'collections_paths', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'), ('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'), ) config_values = read_ansible_config(job.project.get_project_path(), list(map(lambda x: x[1], path_vars))) for env_key, config_setting, folder, default in path_vars: paths = default.split(':') if env_key in env: for path in env[env_key].split(':'): if path not in paths: paths = [env[env_key]] + paths elif config_setting in config_values: for path in config_values[config_setting].split(':'): if path not in paths: paths = [config_values[config_setting]] + paths # FIXME: again, figure out more elegant way for inside container paths = [os.path.join('/runner', folder)] + paths env[env_key] = os.pathsep.join(paths) return env def build_args(self, job, private_data_dir, passwords): """ Build command line argument list for running ansible-playbook, optionally using ssh-agent for public/private key authentication. """ creds = job.machine_credential ssh_username, become_username, become_method = '', '', '' if creds: ssh_username = creds.get_input('username', default='') become_method = creds.get_input('become_method', default='') become_username = creds.get_input('become_username', default='') else: become_method = None become_username = "" # Always specify the normal SSH user as root by default. Since this # task is normally running in the background under a service account, # it doesn't make sense to rely on ansible-playbook's default of using # the current user. 
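        # Illustrative result of the argument building below for a machine
        # credential with username='admin', an SSH password configured, and
        # become_method='sudo' (a sketch, not exhaustive):
        #   ['-u', 'admin', '--ask-pass', '--become', '--become-method', 'sudo',
        #    '--ask-become-pass', ...]
        # sanitize_jinja() is applied to these user-controlled values so
        # template syntax cannot ride along into the command line.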
ssh_username = ssh_username or 'root' args = [] if job.job_type == 'check': args.append('--check') args.extend(['-u', sanitize_jinja(ssh_username)]) if 'ssh_password' in passwords: args.append('--ask-pass') if job.become_enabled: args.append('--become') if job.diff_mode: args.append('--diff') if become_method: args.extend(['--become-method', sanitize_jinja(become_method)]) if become_username: args.extend(['--become-user', sanitize_jinja(become_username)]) if 'become_password' in passwords: args.append('--ask-become-pass') # Support prompting for multiple vault passwords for k, v in passwords.items(): if k.startswith('vault_password'): if k == 'vault_password': args.append('--ask-vault-pass') else: # split only on the first dot in case the vault ID itself contains a dot vault_id = k.split('.', 1)[1] args.append('--vault-id') args.append('{}@prompt'.format(vault_id)) if job.forks: if settings.MAX_FORKS > 0 and job.forks > settings.MAX_FORKS: logger.warning(f'Maximum number of forks ({settings.MAX_FORKS}) exceeded.') args.append('--forks=%d' % settings.MAX_FORKS) else: args.append('--forks=%d' % job.forks) if job.force_handlers: args.append('--force-handlers') if job.limit: args.extend(['-l', job.limit]) if job.verbosity: args.append('-%s' % ('v' * min(5, job.verbosity))) if job.job_tags: args.extend(['-t', job.job_tags]) if job.skip_tags: args.append('--skip-tags=%s' % job.skip_tags) if job.start_at_task: args.append('--start-at-task=%s' % job.start_at_task) return args def build_cwd(self, job, private_data_dir): return os.path.join(private_data_dir, 'project') def build_playbook_path_relative_to_cwd(self, job, private_data_dir): return job.playbook def build_extra_vars_file(self, job, private_data_dir): # Define special extra_vars for AWX, combine with job.extra_vars. 
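        # Roughly what happens below when ALLOW_JINJA_IN_EXTRA_VARS is
        # 'template': top-level key/values already defined on the Job
        # Template are written as-is, while everything else is written so
        # that embedded Jinja2 is not rendered. E.g. (hypothetical values):
        #   JT.extra_vars  = {'region': 'us-east-1'}
        #   job extra_vars = {'region': 'us-east-1', 'x': '{{ lookup(...) }}'}
        #   -> 'region' stays templatable; 'x' is rendered inert by safe_dump().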
        extra_vars = job.awx_meta_vars()
        if job.extra_vars_dict:
            extra_vars.update(json.loads(job.decrypted_extra_vars()))
        # By default, all extra vars disallow Jinja2 template usage for
        # security reasons; top-level key-values defined in JT.extra_vars,
        # however, are allowed as "safe" (because they can only be set by
        # users with higher levels of privilege - those that have the
        # ability to create and edit Job Templates)
        safe_dict = {}
        if job.job_template and settings.ALLOW_JINJA_IN_EXTRA_VARS == 'template':
            safe_dict = job.job_template.extra_vars_dict
        return self._write_extra_vars_file(private_data_dir, extra_vars, safe_dict)

    def build_credentials_list(self, job):
        return job.credentials.prefetch_related('input_sources__source_credential').all()

    def get_password_prompts(self, passwords={}):
        d = super(RunJob, self).get_password_prompts(passwords)
        d[r'Enter passphrase for .*:\s*?$'] = 'ssh_key_unlock'
        d[r'Bad passphrase, try again for .*:\s*?$'] = ''
        for method in PRIVILEGE_ESCALATION_METHODS:
            d[r'%s password.*:\s*?$' % (method[0])] = 'become_password'
            d[r'%s password.*:\s*?$' % (method[0].upper())] = 'become_password'
        d[r'BECOME password.*:\s*?$'] = 'become_password'
        d[r'SSH password:\s*?$'] = 'ssh_password'
        d[r'Password:\s*?$'] = 'ssh_password'
        d[r'Vault password:\s*?$'] = 'vault_password'
        for k, v in passwords.items():
            if k.startswith('vault_password.'):
                # split only on the first dot in case the vault ID itself contains a dot
                vault_id = k.split('.', 1)[1]
                d[r'Vault password \({}\):\s*?$'.format(vault_id)] = k
        return d

    def build_execution_environment_params(self, instance, private_data_dir):
        if settings.IS_K8S:
            return {}

        params = super(RunJob, self).build_execution_environment_params(instance, private_data_dir)
        # If this job uses the insights fact cache and the insights directory
        # exists on this node, mount it into the execution environment.
        insights_dir = os.path.dirname(settings.INSIGHTS_SYSTEM_ID_FILE)
        if instance.use_fact_cache and os.path.exists(insights_dir):
            logger.debug('Mounting insights directory {} for {}'.format(insights_dir, instance.log_format))
            params.setdefault('container_volume_mounts', [])
            params['container_volume_mounts'].extend(
                [
                    f"{insights_dir}:{insights_dir}:Z",
                ]
            )
        return params

    def pre_run_hook(self, job, private_data_dir):
        super(RunJob, self).pre_run_hook(job, private_data_dir)
        if job.inventory is None:
            error = _('Job could not start because it does not have a valid inventory.')
            self.update_model(job.pk, status='failed', job_explanation=error)
            raise RuntimeError(error)
        elif job.project is None:
            error = _('Job could not start because it does not have a valid project.')
            self.update_model(job.pk, status='failed', job_explanation=error)
            raise RuntimeError(error)
        elif job.project.status in ('error', 'failed'):
            msg = _('The project revision for this job template is unknown due to a failed update.')
            job = self.update_model(job.pk, status='failed', job_explanation=msg)
            raise RuntimeError(msg)

        project_path = job.project.get_project_path(check_if_exists=False)
        job_revision = job.project.scm_revision
        sync_needs = []
        source_update_tag = 'update_{}'.format(job.project.scm_type)
        branch_override = bool(job.scm_branch and job.scm_branch != job.project.scm_branch)
        if not job.project.scm_type:
            pass  # manual projects are not synced, user has responsibility for that
        elif not os.path.exists(project_path):
            logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
            sync_needs.append(source_update_tag)
        elif job.project.scm_type == 'git' and job.project.scm_revision and (not branch_override):
            try:
                git_repo = git.Repo(project_path)
                if job_revision == git_repo.head.commit.hexsha:
logger.debug('Skipping project sync for {} because commit is locally available'.format(job.log_format)) else: sync_needs.append(source_update_tag) except (ValueError, BadGitName, git.exc.InvalidGitRepositoryError): logger.debug('Needed commit for {} not in local source tree, will sync with remote'.format(job.log_format)) sync_needs.append(source_update_tag) else: logger.debug('Project not available locally, {} will sync with remote'.format(job.log_format)) sync_needs.append(source_update_tag) has_cache = os.path.exists(os.path.join(job.project.get_cache_path(), job.project.cache_id)) # Galaxy requirements are not supported for manual projects if job.project.scm_type and ((not has_cache) or branch_override): sync_needs.extend(['install_roles', 'install_collections']) if sync_needs: pu_ig = job.instance_group pu_en = job.execution_node sync_metafields = dict( launch_type="sync", job_type='run', job_tags=','.join(sync_needs), status='running', instance_group=pu_ig, execution_node=pu_en, celery_task_id=job.celery_task_id, ) if branch_override: sync_metafields['scm_branch'] = job.scm_branch if 'update_' not in sync_metafields['job_tags']: sync_metafields['scm_revision'] = job_revision local_project_sync = job.project.create_project_update(_eager_fields=sync_metafields) # save the associated job before calling run() so that a # cancel() call on the job can cancel the project update job = self.update_model(job.pk, project_update=local_project_sync) project_update_task = local_project_sync._get_task_class() try: # the job private_data_dir is passed so sync can download roles and collections there sync_task = project_update_task(job_private_data_dir=private_data_dir) sync_task.run(local_project_sync.id) local_project_sync.refresh_from_db() job = self.update_model(job.pk, scm_revision=local_project_sync.scm_revision) except Exception: local_project_sync.refresh_from_db() if local_project_sync.status != 'canceled': job = self.update_model( job.pk, status='failed', job_explanation=( 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % ('project_update', local_project_sync.name, local_project_sync.id) ), ) raise job.refresh_from_db() if job.cancel_flag: return else: # Case where a local sync is not needed, meaning that local tree is # up-to-date with project, job is running project current version if job_revision: job = self.update_model(job.pk, scm_revision=job_revision) # Project update does not copy the folder, so copy here RunProjectUpdate.make_local_copy(job.project, private_data_dir, scm_revision=job_revision) if job.inventory.kind == 'smart': # cache smart inventory memberships so that the host_filter query is not # ran inside of the event saving code update_smart_memberships_for_inventory(job.inventory) def final_run_hook(self, job, status, private_data_dir, fact_modification_times): super(RunJob, self).final_run_hook(job, status, private_data_dir, fact_modification_times) if not private_data_dir: # If there's no private data dir, that means we didn't get into the # actual `run()` call; this _usually_ means something failed in # the pre_run_hook method return if job.use_fact_cache: job.finish_job_fact_cache( os.path.join(private_data_dir, 'artifacts', 'fact_cache'), fact_modification_times, ) try: inventory = job.inventory except Inventory.DoesNotExist: pass else: if inventory is not None: update_inventory_computed_fields.delay(inventory.id) @task(queue=get_local_queuename) class RunProjectUpdate(BaseTask): model = ProjectUpdate event_model = ProjectUpdateEvent 
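    # event_data_key is stamped onto every event emitted by this task (see
    # BaseTask.event_handler / finished_callback) so callback receivers can
    # attach events to the right model row, here ProjectUpdateEvent rows
    # keyed by 'project_update_id'.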
    event_data_key = 'project_update_id'

    def __init__(self, *args, job_private_data_dir=None, **kwargs):
        super(RunProjectUpdate, self).__init__(*args, **kwargs)
        self.playbook_new_revision = None
        self.original_branch = None
        self.job_private_data_dir = job_private_data_dir

    def event_handler(self, event_data):
        super(RunProjectUpdate, self).event_handler(event_data)
        returned_data = event_data.get('event_data', {})
        if returned_data.get('task_action', '') == 'set_fact':
            returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
            if 'scm_version' in returned_facts:
                self.playbook_new_revision = returned_facts['scm_version']

    def build_private_data(self, project_update, private_data_dir):
        """
        Return SSH private key data needed for this project update.

        Returns a dict of the form
        {
            'credentials': {
                <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>,
                <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>,
                <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>
            }
        }
        """
        private_data = {'credentials': {}}
        if project_update.credential:
            credential = project_update.credential
            if credential.has_input('ssh_key_data'):
                private_data['credentials'][credential] = credential.get_input('ssh_key_data', default='')
        return private_data

    def build_passwords(self, project_update, runtime_passwords):
        """
        Build a dictionary of passwords for SSH private key unlock and SCM
        username/password.
        """
        passwords = super(RunProjectUpdate, self).build_passwords(project_update, runtime_passwords)
        if project_update.credential:
            passwords['scm_key_unlock'] = project_update.credential.get_input('ssh_key_unlock', default='')
            passwords['scm_username'] = project_update.credential.get_input('username', default='')
            passwords['scm_password'] = project_update.credential.get_input('password', default='')
        return passwords

    def build_env(self, project_update, private_data_dir, private_data_files=None):
        """
        Build environment dictionary for ansible-playbook.
        """
        env = super(RunProjectUpdate, self).build_env(project_update, private_data_dir, private_data_files=private_data_files)
        env['ANSIBLE_RETRY_FILES_ENABLED'] = str(False)
        env['ANSIBLE_ASK_PASS'] = str(False)
        env['ANSIBLE_BECOME_ASK_PASS'] = str(False)
        env['DISPLAY'] = ''  # Prevent stupid password popup when running tests.
        # give ansible a hint about the intended tmpdir to work around issues
        # like https://github.com/ansible/ansible/issues/30064
        env['TMP'] = settings.AWX_ISOLATION_BASE_PATH
        env['PROJECT_UPDATE_ID'] = str(project_update.pk)
        if settings.GALAXY_IGNORE_CERTS:
            # environment values must be strings
            env['ANSIBLE_GALAXY_IGNORE'] = str(True)
        # build out env vars for Galaxy credentials (in order)
        galaxy_server_list = []
        if project_update.project.organization:
            for i, cred in enumerate(project_update.project.organization.galaxy_credentials.all()):
                env[f'ANSIBLE_GALAXY_SERVER_SERVER{i}_URL'] = cred.get_input('url')
                auth_url = cred.get_input('auth_url', default=None)
                token = cred.get_input('token', default=None)
                if token:
                    env[f'ANSIBLE_GALAXY_SERVER_SERVER{i}_TOKEN'] = token
                if auth_url:
                    env[f'ANSIBLE_GALAXY_SERVER_SERVER{i}_AUTH_URL'] = auth_url
                galaxy_server_list.append(f'server{i}')
        if galaxy_server_list:
            env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join(galaxy_server_list)
        return env

    def _build_scm_url_extra_vars(self, project_update):
        """
        Helper method to build SCM url and extra vars with parameters needed
        for authentication.
""" extra_vars = {} if project_update.credential: scm_username = project_update.credential.get_input('username', default='') scm_password = project_update.credential.get_input('password', default='') else: scm_username = '' scm_password = '' scm_type = project_update.scm_type scm_url = update_scm_url(scm_type, project_update.scm_url, check_special_cases=False) scm_url_parts = urlparse.urlsplit(scm_url) # Prefer the username/password in the URL, if provided. scm_username = scm_url_parts.username or scm_username scm_password = scm_url_parts.password or scm_password if scm_username: if scm_type == 'svn': extra_vars['scm_username'] = scm_username extra_vars['scm_password'] = scm_password scm_password = False if scm_url_parts.scheme != 'svn+ssh': scm_username = False elif scm_url_parts.scheme.endswith('ssh'): scm_password = False elif scm_type in ('insights', 'archive'): extra_vars['scm_username'] = scm_username extra_vars['scm_password'] = scm_password scm_url = update_scm_url(scm_type, scm_url, scm_username, scm_password, scp_format=True) else: scm_url = update_scm_url(scm_type, scm_url, scp_format=True) # Pass the extra accept_hostkey parameter to the git module. if scm_type == 'git' and scm_url_parts.scheme.endswith('ssh'): extra_vars['scm_accept_hostkey'] = 'true' return scm_url, extra_vars def build_inventory(self, instance, private_data_dir): return 'localhost,' def build_args(self, project_update, private_data_dir, passwords): """ Build command line argument list for running ansible-playbook, optionally using ssh-agent for public/private key authentication. """ args = [] if getattr(settings, 'PROJECT_UPDATE_VVV', False): args.append('-vvv') if project_update.job_tags: args.extend(['-t', project_update.job_tags]) return args def build_extra_vars_file(self, project_update, private_data_dir): extra_vars = {} scm_url, extra_vars_new = self._build_scm_url_extra_vars(project_update) extra_vars.update(extra_vars_new) scm_branch = project_update.scm_branch if project_update.job_type == 'run' and (not project_update.branch_override): if project_update.project.scm_revision: scm_branch = project_update.project.scm_revision elif not scm_branch: raise RuntimeError('Could not determine a revision to run from project.') elif not scm_branch: scm_branch = 'HEAD' galaxy_creds_are_defined = project_update.project.organization and project_update.project.organization.galaxy_credentials.exists() if not galaxy_creds_are_defined and (settings.AWX_ROLES_ENABLED or settings.AWX_COLLECTIONS_ENABLED): logger.warning('Galaxy role/collection syncing is enabled, but no ' f'credentials are configured for {project_update.project.organization}.') extra_vars.update( { 'projects_root': settings.PROJECTS_ROOT.rstrip('/'), 'local_path': os.path.basename(project_update.project.local_path), 'project_path': project_update.get_project_path(check_if_exists=False), # deprecated 'insights_url': settings.INSIGHTS_URL_BASE, 'awx_license_type': get_license().get('license_type', 'UNLICENSED'), 'awx_version': get_awx_version(), 'scm_url': scm_url, 'scm_branch': scm_branch, 'scm_clean': project_update.scm_clean, 'scm_track_submodules': project_update.scm_track_submodules, 'roles_enabled': galaxy_creds_are_defined and settings.AWX_ROLES_ENABLED, 'collections_enabled': galaxy_creds_are_defined and settings.AWX_COLLECTIONS_ENABLED, } ) # apply custom refspec from user for PR refs and the like if project_update.scm_refspec: extra_vars['scm_refspec'] = project_update.scm_refspec elif project_update.project.allow_override: # If branch is 
override-able, do extra fetch for all branches extra_vars['scm_refspec'] = 'refs/heads/*:refs/remotes/origin/*' if project_update.scm_type == 'archive': # for raw archive, prevent error moving files between volumes extra_vars['ansible_remote_tmp'] = os.path.join(project_update.get_project_path(check_if_exists=False), '.ansible_awx', 'tmp') self._write_extra_vars_file(private_data_dir, extra_vars) def build_cwd(self, project_update, private_data_dir): return os.path.join(private_data_dir, 'project') def build_playbook_path_relative_to_cwd(self, project_update, private_data_dir): return os.path.join('project_update.yml') def get_password_prompts(self, passwords={}): d = super(RunProjectUpdate, self).get_password_prompts(passwords) d[r'Username for.*:\s*?$'] = 'scm_username' d[r'Password for.*:\s*?$'] = 'scm_password' d[r'Password:\s*?$'] = 'scm_password' d[r'\S+?@\S+?\'s\s+?password:\s*?$'] = 'scm_password' d[r'Enter passphrase for .*:\s*?$'] = 'scm_key_unlock' d[r'Bad passphrase, try again for .*:\s*?$'] = '' # FIXME: Configure whether we should auto accept host keys? d[r'^Are you sure you want to continue connecting \(yes/no\)\?\s*?$'] = 'yes' return d def _update_dependent_inventories(self, project_update, dependent_inventory_sources): scm_revision = project_update.project.scm_revision inv_update_class = InventoryUpdate._get_task_class() for inv_src in dependent_inventory_sources: if not inv_src.update_on_project_update: continue if inv_src.scm_last_revision == scm_revision: logger.debug('Skipping SCM inventory update for `{}` because ' 'project has not changed.'.format(inv_src.name)) continue logger.debug('Local dependent inventory update for `{}`.'.format(inv_src.name)) with transaction.atomic(): if InventoryUpdate.objects.filter(inventory_source=inv_src, status__in=ACTIVE_STATES).exists(): logger.debug('Skipping SCM inventory update for `{}` because ' 'another update is already active.'.format(inv_src.name)) continue local_inv_update = inv_src.create_inventory_update( _eager_fields=dict( launch_type='scm', status='running', instance_group=project_update.instance_group, execution_node=project_update.execution_node, source_project_update=project_update, celery_task_id=project_update.celery_task_id, ) ) try: inv_update_class().run(local_inv_update.id) except Exception: logger.exception('{} Unhandled exception updating dependent SCM inventory sources.'.format(project_update.log_format)) try: project_update.refresh_from_db() except ProjectUpdate.DoesNotExist: logger.warning('Project update deleted during updates of dependent SCM inventory sources.') break try: local_inv_update.refresh_from_db() except InventoryUpdate.DoesNotExist: logger.warning('%s Dependent inventory update deleted during execution.', project_update.log_format) continue if project_update.cancel_flag: logger.info('Project update {} was canceled while updating dependent inventories.'.format(project_update.log_format)) break if local_inv_update.cancel_flag: logger.info('Continuing to process project dependencies after {} was canceled'.format(local_inv_update.log_format)) if local_inv_update.status == 'successful': inv_src.scm_last_revision = scm_revision inv_src.save(update_fields=['scm_last_revision']) def release_lock(self, instance): try: fcntl.lockf(self.lock_fd, fcntl.LOCK_UN) except IOError as e: logger.error("I/O error({0}) while trying to release lock file [{1}]: {2}".format(e.errno, instance.get_lock_file(), e.strerror)) os.close(self.lock_fd) raise os.close(self.lock_fd) self.lock_fd = None ''' Note: We don't 
    support blocking=False
    '''

    def acquire_lock(self, instance, blocking=True):
        lock_path = instance.get_lock_file()
        if lock_path is None:
            # If from migration or someone blanked local_path for any other reason, recoverable by save
            instance.save()
            lock_path = instance.get_lock_file()
            if lock_path is None:
                raise RuntimeError(u'Invalid lock file path')

        try:
            self.lock_fd = os.open(lock_path, os.O_RDWR | os.O_CREAT)
        except OSError as e:
            logger.error("I/O error({0}) while trying to open lock file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
            raise

        start_time = time.time()
        while True:
            try:
                instance.refresh_from_db(fields=['cancel_flag'])
                if instance.cancel_flag:
                    logger.debug("ProjectUpdate({0}) was canceled".format(instance.pk))
                    return
                fcntl.lockf(self.lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                break
            except IOError as e:
                if e.errno not in (errno.EAGAIN, errno.EACCES):
                    os.close(self.lock_fd)
                    logger.error("I/O error({0}) while trying to acquire lock on file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
                    raise
                else:
                    time.sleep(1.0)
        waiting_time = time.time() - start_time

        if waiting_time > 1.0:
            logger.info('{} spent {} waiting to acquire lock for local source tree for path {}.'.format(instance.log_format, waiting_time, lock_path))

    def pre_run_hook(self, instance, private_data_dir):
        super(RunProjectUpdate, self).pre_run_hook(instance, private_data_dir)
        # re-create root project folder if a natural disaster has destroyed it
        if not os.path.exists(settings.PROJECTS_ROOT):
            os.mkdir(settings.PROJECTS_ROOT)
        project_path = instance.project.get_project_path(check_if_exists=False)
        if not os.path.exists(project_path):
            os.makedirs(project_path)  # used as container mount
        self.acquire_lock(instance)
        self.original_branch = None
        if instance.scm_type == 'git' and instance.branch_override:
            if os.path.exists(project_path):
                git_repo = git.Repo(project_path)
                if git_repo.head.is_detached:
                    self.original_branch = git_repo.head.commit
                else:
                    self.original_branch = git_repo.active_branch
        stage_path = os.path.join(instance.get_cache_path(), 'stage')
        if os.path.exists(stage_path):
            logger.warning('{0} unexpectedly existed before update'.format(stage_path))
            shutil.rmtree(stage_path)
        os.makedirs(stage_path)  # presence of empty cache indicates lack of roles or collections
        # the project update playbook is not in a git repo, but uses a vendoring directory
        # to be consistent with the ansible-runner model,
        # that is moved into the runner project folder here
        awx_playbooks = self.get_path_to('..', 'playbooks')
        copy_tree(awx_playbooks, os.path.join(private_data_dir, 'project'))

    @staticmethod
    def clear_project_cache(cache_dir, keep_value):
        if os.path.isdir(cache_dir):
            for entry in os.listdir(cache_dir):
                old_path = os.path.join(cache_dir, entry)
                if entry not in (keep_value, 'stage'):
                    # invalidate, then delete
                    new_path = os.path.join(cache_dir, '.~~delete~~' + entry)
                    try:
                        os.rename(old_path, new_path)
                        shutil.rmtree(new_path)
                    except OSError:
                        logger.warning(f"Could not remove cache directory {old_path}")

    @staticmethod
    def make_local_copy(p, job_private_data_dir, scm_revision=None):
        """Copy project content (roles and collections) to a job private_data_dir

        :param object p: Either a project or a project update
        :param str job_private_data_dir: The root of the target ansible-runner folder
        :param str scm_revision: For branch_override cases, the git revision to copy
        """
        project_path = p.get_project_path(check_if_exists=False)
        destination_folder = os.path.join(job_private_data_dir, 'project')
        if not scm_revision:
            scm_revision = p.scm_revision
        if
p.scm_type == 'git': git_repo = git.Repo(project_path) if not os.path.exists(destination_folder): os.mkdir(destination_folder, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) tmp_branch_name = 'awx_internal/{}'.format(uuid4()) # always clone based on specific job revision if not p.scm_revision: raise RuntimeError('Unexpectedly could not determine a revision to run from project.') source_branch = git_repo.create_head(tmp_branch_name, p.scm_revision) # git clone must take file:// syntax for source repo or else options like depth will be ignored source_as_uri = Path(project_path).as_uri() git.Repo.clone_from( source_as_uri, destination_folder, branch=source_branch, depth=1, single_branch=True, # shallow, do not copy full history ) # submodules copied in loop because shallow copies from local HEADs are ideal # and no git clone submodule options are compatible with minimum requirements for submodule in git_repo.submodules: subrepo_path = os.path.abspath(os.path.join(project_path, submodule.path)) subrepo_destination_folder = os.path.abspath(os.path.join(destination_folder, submodule.path)) subrepo_uri = Path(subrepo_path).as_uri() git.Repo.clone_from(subrepo_uri, subrepo_destination_folder, depth=1, single_branch=True) # force option is necessary because remote refs are not counted, although no information is lost git_repo.delete_head(tmp_branch_name, force=True) else: copy_tree(project_path, destination_folder, preserve_symlinks=1) # copy over the roles and collection cache to job folder cache_path = os.path.join(p.get_cache_path(), p.cache_id) subfolders = [] if settings.AWX_COLLECTIONS_ENABLED: subfolders.append('requirements_collections') if settings.AWX_ROLES_ENABLED: subfolders.append('requirements_roles') for subfolder in subfolders: cache_subpath = os.path.join(cache_path, subfolder) if os.path.exists(cache_subpath): dest_subpath = os.path.join(job_private_data_dir, subfolder) copy_tree(cache_subpath, dest_subpath, preserve_symlinks=1) logger.debug('{0} {1} prepared {2} from cache'.format(type(p).__name__, p.pk, dest_subpath)) def post_run_hook(self, instance, status): super(RunProjectUpdate, self).post_run_hook(instance, status) # To avoid hangs, very important to release lock even if errors happen here try: if self.playbook_new_revision: instance.scm_revision = self.playbook_new_revision instance.save(update_fields=['scm_revision']) # Roles and collection folders copy to durable cache base_path = instance.get_cache_path() stage_path = os.path.join(base_path, 'stage') if status == 'successful' and 'install_' in instance.job_tags: # Clear other caches before saving this one, and if branch is overridden # do not clear cache for main branch, but do clear it for other branches self.clear_project_cache(base_path, keep_value=instance.project.cache_id) cache_path = os.path.join(base_path, instance.cache_id) if os.path.exists(stage_path): if os.path.exists(cache_path): logger.warning('Rewriting cache at {0}, performance may suffer'.format(cache_path)) shutil.rmtree(cache_path) os.rename(stage_path, cache_path) logger.debug('{0} wrote to cache at {1}'.format(instance.log_format, cache_path)) elif os.path.exists(stage_path): shutil.rmtree(stage_path) # cannot trust content update produced if self.job_private_data_dir: if status == 'successful': # copy project folder before resetting to default branch # because some git-tree-specific resources (like submodules) might matter self.make_local_copy(instance, self.job_private_data_dir) if self.original_branch: # for git project syncs, non-default 
branches can be problems # restore to branch the repo was on before this run try: self.original_branch.checkout() except Exception: # this could have failed due to dirty tree, but difficult to predict all cases logger.exception('Failed to restore project repo to prior state after {}'.format(instance.log_format)) finally: self.release_lock(instance) p = instance.project if instance.job_type == 'check' and status not in ( 'failed', 'canceled', ): if self.playbook_new_revision: p.scm_revision = self.playbook_new_revision else: if status == 'successful': logger.error("{} Could not find scm revision in check".format(instance.log_format)) p.playbook_files = p.playbooks p.inventory_files = p.inventories p.save(update_fields=['scm_revision', 'playbook_files', 'inventory_files']) # Update any inventories that depend on this project dependent_inventory_sources = p.scm_inventory_sources.filter(update_on_project_update=True) if len(dependent_inventory_sources) > 0: if status == 'successful' and instance.launch_type != 'sync': self._update_dependent_inventories(instance, dependent_inventory_sources) def build_execution_environment_params(self, instance, private_data_dir): if settings.IS_K8S: return {} params = super(RunProjectUpdate, self).build_execution_environment_params(instance, private_data_dir) project_path = instance.get_project_path(check_if_exists=False) cache_path = instance.get_cache_path() params.setdefault('container_volume_mounts', []) params['container_volume_mounts'].extend( [ f"{project_path}:{project_path}:Z", f"{cache_path}:{cache_path}:Z", ] ) return params @task(queue=get_local_queuename) class RunInventoryUpdate(BaseTask): model = InventoryUpdate event_model = InventoryUpdateEvent event_data_key = 'inventory_update_id' def build_private_data(self, inventory_update, private_data_dir): """ Return private data needed for inventory update. Returns a dict of the form { 'credentials': { <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>, <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>, <awx.main.models.Credential>: <credential_decrypted_ssh_key_data> } } If no private data is needed, return None. """ if inventory_update.source in InventorySource.injectors: injector = InventorySource.injectors[inventory_update.source]() return injector.build_private_data(inventory_update, private_data_dir) def build_env(self, inventory_update, private_data_dir, private_data_files=None): """Build environment dictionary for ansible-inventory. Most environment variables related to credentials or configuration are accomplished by the inventory source injectors (in this method) or custom credential type injectors (in main run method). """ env = super(RunInventoryUpdate, self).build_env(inventory_update, private_data_dir, private_data_files=private_data_files) if private_data_files is None: private_data_files = {} # Pass inventory source ID to inventory script. 
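        # Sketch of the extra environment an inventory update ends up with
        # below (values illustrative): INVENTORY_SOURCE_ID='42',
        # INVENTORY_UPDATE_ID='101', ANSIBLE_INVENTORY_ENABLED='auto'
        # (plugin selection left to the source's collection), plus whatever
        # the source-specific injector contributes.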
env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id) env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk) env.update(STANDARD_INVENTORY_UPDATE_ENV) injector = None if inventory_update.source in InventorySource.injectors: injector = InventorySource.injectors[inventory_update.source]() if injector is not None: env = injector.build_env(inventory_update, env, private_data_dir, private_data_files) # All CLOUD_PROVIDERS sources implement as inventory plugin from collection env['ANSIBLE_INVENTORY_ENABLED'] = 'auto' if inventory_update.source == 'scm': for env_k in inventory_update.source_vars_dict: if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLOCKED: env[str(env_k)] = str(inventory_update.source_vars_dict[env_k]) elif inventory_update.source == 'file': raise NotImplementedError('Cannot update file sources through the task system.') if inventory_update.source == 'scm' and inventory_update.source_project_update: env_key = 'ANSIBLE_COLLECTIONS_PATHS' config_setting = 'collections_paths' folder = 'requirements_collections' default = '~/.ansible/collections:/usr/share/ansible/collections' config_values = read_ansible_config(os.path.join(private_data_dir, 'project'), [config_setting]) paths = default.split(':') if env_key in env: for path in env[env_key].split(':'): if path not in paths: paths = [env[env_key]] + paths elif config_setting in config_values: for path in config_values[config_setting].split(':'): if path not in paths: paths = [config_values[config_setting]] + paths # FIXME: containers paths = [os.path.join('/runner', folder)] + paths env[env_key] = os.pathsep.join(paths) return env def write_args_file(self, private_data_dir, args): path = os.path.join(private_data_dir, 'args') handle = os.open(path, os.O_RDWR | os.O_CREAT, stat.S_IREAD | stat.S_IWRITE) f = os.fdopen(handle, 'w') f.write(' '.join(args)) f.close() os.chmod(path, stat.S_IRUSR) return path def build_args(self, inventory_update, private_data_dir, passwords): """Build the command line argument list for running an inventory import. """ # Get the inventory source and inventory. 
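        # Example of the command assembled below for an SCM source whose
        # source path is a directory (paths are the in-container view under
        # /runner; the update id 101 is illustrative):
        #   ansible-inventory --list --export -i /runner/project/inventory \
        #       --output /runner/artifacts/101/output.json \
        #       --playbook-dir /runner/project/inventory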
        inventory_source = inventory_update.inventory_source
        inventory = inventory_source.inventory

        if inventory is None:
            raise RuntimeError('Inventory Source is not associated with an Inventory.')

        args = ['ansible-inventory', '--list', '--export']

        # Add arguments for the source inventory file/script/thing
        rel_path = self.pseudo_build_inventory(inventory_update, private_data_dir)
        container_location = os.path.join('/runner', rel_path)  # TODO: make container paths elegant
        source_location = os.path.join(private_data_dir, rel_path)

        args.append('-i')
        args.append(container_location)

        args.append('--output')
        args.append(os.path.join('/runner', 'artifacts', str(inventory_update.id), 'output.json'))

        if os.path.isdir(source_location):
            playbook_dir = container_location
        else:
            playbook_dir = os.path.dirname(container_location)
        args.extend(['--playbook-dir', playbook_dir])

        if inventory_update.verbosity:
            args.append('-' + 'v' * min(5, inventory_update.verbosity * 2 + 1))

        return args

    def build_inventory(self, inventory_update, private_data_dir):
        return None  # what runner expects in order to not deal with inventory

    def pseudo_build_inventory(self, inventory_update, private_data_dir):
        """Inventory imports are run through a management command.
        We pass the inventory in args to that command, so this is not
        considered to be "Ansible" inventory (by runner) even though it is.
        Eventually, we would like to cut out the management command,
        and thus use this as the real inventory.
        """
        src = inventory_update.source

        injector = None
        if inventory_update.source in InventorySource.injectors:
            injector = InventorySource.injectors[src]()

        if injector is not None:
            content = injector.inventory_contents(inventory_update, private_data_dir)
            # must be a statically named file
            inventory_path = os.path.join(private_data_dir, injector.filename)
            with open(inventory_path, 'w') as f:
                f.write(content)
            os.chmod(inventory_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
            rel_path = injector.filename
        elif src == 'scm':
            rel_path = os.path.join('project', inventory_update.source_path)

        return rel_path

    def build_cwd(self, inventory_update, private_data_dir):
        """
        There is one case where the inventory "source" is in a different
        location from the private data:
        - SCM, where source needs to live in the project folder
        """
        src = inventory_update.source
        container_dir = '/runner'  # TODO: make container paths elegant
        if src == 'scm' and inventory_update.source_project_update:
            return os.path.join(container_dir, 'project')
        return container_dir

    def build_playbook_path_relative_to_cwd(self, inventory_update, private_data_dir):
        return None

    def build_credentials_list(self, inventory_update):
        # All credentials not used by inventory source injector
        return inventory_update.get_extra_credentials()

    def pre_run_hook(self, inventory_update, private_data_dir):
        super(RunInventoryUpdate, self).pre_run_hook(inventory_update, private_data_dir)
        source_project = None
        if inventory_update.inventory_source:
            source_project = inventory_update.inventory_source.source_project
        if (
            inventory_update.source == 'scm'
            and inventory_update.launch_type != 'scm'
            and source_project
            and source_project.scm_type
        ):  # never ever update manual projects
            # Check if the content cache exists, so that we do not unnecessarily re-download roles
            sync_needs = ['update_{}'.format(source_project.scm_type)]
            has_cache = os.path.exists(os.path.join(source_project.get_cache_path(), source_project.cache_id))
            # Galaxy requirements are not supported for manual projects
            if not has_cache:
                sync_needs.extend(['install_roles',
'install_collections']) local_project_sync = source_project.create_project_update( _eager_fields=dict( launch_type="sync", job_type='run', job_tags=','.join(sync_needs), status='running', execution_node=inventory_update.execution_node, instance_group=inventory_update.instance_group, celery_task_id=inventory_update.celery_task_id, ) ) # associate the inventory update before calling run() so that a # cancel() call on the inventory update can cancel the project update local_project_sync.scm_inventory_updates.add(inventory_update) project_update_task = local_project_sync._get_task_class() try: sync_task = project_update_task(job_private_data_dir=private_data_dir) sync_task.run(local_project_sync.id) local_project_sync.refresh_from_db() inventory_update.inventory_source.scm_last_revision = local_project_sync.scm_revision inventory_update.inventory_source.save(update_fields=['scm_last_revision']) except Exception: inventory_update = self.update_model( inventory_update.pk, status='failed', job_explanation=( 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % ('project_update', local_project_sync.name, local_project_sync.id) ), ) raise elif inventory_update.source == 'scm' and inventory_update.launch_type == 'scm' and source_project: # This follows update, not sync, so make copy here RunProjectUpdate.make_local_copy(source_project, private_data_dir) def post_run_hook(self, inventory_update, status): super(RunInventoryUpdate, self).post_run_hook(inventory_update, status) if status != 'successful': return # nothing to save, step out of the way to allow error reporting private_data_dir = inventory_update.job_env['AWX_PRIVATE_DATA_DIR'] expected_output = os.path.join(private_data_dir, 'artifacts', 'output.json') with open(expected_output) as f: data = json.load(f) # build inventory save options options = dict( overwrite=inventory_update.overwrite, overwrite_vars=inventory_update.overwrite_vars, ) src = inventory_update.source if inventory_update.enabled_var: options['enabled_var'] = inventory_update.enabled_var options['enabled_value'] = inventory_update.enabled_value else: if getattr(settings, '%s_ENABLED_VAR' % src.upper(), False): options['enabled_var'] = getattr(settings, '%s_ENABLED_VAR' % src.upper()) if getattr(settings, '%s_ENABLED_VALUE' % src.upper(), False): options['enabled_value'] = getattr(settings, '%s_ENABLED_VALUE' % src.upper()) if inventory_update.host_filter: options['host_filter'] = inventory_update.host_filter if getattr(settings, '%s_EXCLUDE_EMPTY_GROUPS' % src.upper()): options['exclude_empty_groups'] = True if getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper(), False): options['instance_id_var'] = getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper()) # Verbosity is applied to saving process, as well as ansible-inventory CLI option if inventory_update.verbosity: options['verbosity'] = inventory_update.verbosity handler = SpecialInventoryHandler( self.event_handler, self.cancel_callback, verbosity=inventory_update.verbosity, job_timeout=self.get_instance_timeout(self.instance), start_time=inventory_update.started, counter=self.event_ct, initial_line=self.end_line, ) inv_logger = logging.getLogger('awx.main.commands.inventory_import') formatter = inv_logger.handlers[0].formatter formatter.job_start = inventory_update.started handler.formatter = formatter inv_logger.handlers[0] = handler from awx.main.management.commands.inventory_import import Command as InventoryImportCommand cmd = InventoryImportCommand() try: # save the inventory data to database. 
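            # perform_update() is the same code path as the awx-manage
            # inventory_import management command; on failure the inventory
            # changes are rolled back (hence the "rolling back changes" logs
            # below), so a failed save leaves the inventory untouched.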
            # canceling exceptions will be handled in the global post_run_hook
            cmd.perform_update(options, data, inventory_update)
        except PermissionDenied as exc:
            logger.exception('License error saving {} content'.format(inventory_update.log_format))
            raise PostRunError(str(exc), status='error')
        except PostRunError:
            logger.exception('Error saving {} content, rolling back changes'.format(inventory_update.log_format))
            raise
        except Exception:
            logger.exception('Exception saving {} content, rolling back changes.'.format(inventory_update.log_format))
            raise PostRunError('Error occurred while saving inventory data, see traceback or server logs', status='error', tb=traceback.format_exc())


@task(queue=get_local_queuename)
class RunAdHocCommand(BaseTask):
    """
    Run an ad hoc command using ansible.
    """

    model = AdHocCommand
    event_model = AdHocCommandEvent
    event_data_key = 'ad_hoc_command_id'

    def build_private_data(self, ad_hoc_command, private_data_dir):
        """
        Return SSH private key data needed for this ad hoc command (only if
        stored in DB as ssh_key_data).

        Returns a dict of the form
        {
            'credentials': {
                <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>,
                <awx.main.models.Credential>: <credential_decrypted_ssh_key_data>,
                ...
            },
            'certificates': {
                <awx.main.models.Credential>: <signed SSH certificate data>,
                <awx.main.models.Credential>: <signed SSH certificate data>,
                ...
            }
        }
        """
        # If we were sent SSH credentials, decrypt them and send them
        # back (they will be written to a temporary file).
        creds = ad_hoc_command.credential
        private_data = {'credentials': {}}
        if creds and creds.has_input('ssh_key_data'):
            private_data['credentials'][creds] = creds.get_input('ssh_key_data', default='')
        if creds and creds.has_input('ssh_public_key_data'):
            private_data.setdefault('certificates', {})[creds] = creds.get_input('ssh_public_key_data', default='')

        return private_data

    def build_passwords(self, ad_hoc_command, runtime_passwords):
        """
        Build a dictionary of passwords for SSH private key, SSH user and
        sudo/su.
        """
        passwords = super(RunAdHocCommand, self).build_passwords(ad_hoc_command, runtime_passwords)
        cred = ad_hoc_command.credential
        if cred:
            for field in ('ssh_key_unlock', 'ssh_password', 'become_password'):
                value = runtime_passwords.get(field, cred.get_input('password' if field == 'ssh_password' else field, default=''))
                if value not in ('', 'ASK'):
                    passwords[field] = value
        return passwords

    def build_env(self, ad_hoc_command, private_data_dir, private_data_files=None):
        """
        Build environment dictionary for ansible.
        """
        env = super(RunAdHocCommand, self).build_env(ad_hoc_command, private_data_dir, private_data_files=private_data_files)
        # Set environment variables needed for inventory and ad hoc event
        # callbacks to work.
        env['AD_HOC_COMMAND_ID'] = str(ad_hoc_command.pk)
        env['INVENTORY_ID'] = str(ad_hoc_command.inventory.pk)
        env['INVENTORY_HOSTVARS'] = str(True)
        env['ANSIBLE_LOAD_CALLBACK_PLUGINS'] = '1'
        env['ANSIBLE_SFTP_BATCH_MODE'] = 'False'
        return env

    def build_args(self, ad_hoc_command, private_data_dir, passwords):
        """
        Build command line argument list for running ansible, optionally using
        ssh-agent for public/private key authentication.
""" creds = ad_hoc_command.credential ssh_username, become_username, become_method = '', '', '' if creds: ssh_username = creds.get_input('username', default='') become_method = creds.get_input('become_method', default='') become_username = creds.get_input('become_username', default='') else: become_method = None become_username = "" # Always specify the normal SSH user as root by default. Since this # task is normally running in the background under a service account, # it doesn't make sense to rely on ansible's default of using the # current user. ssh_username = ssh_username or 'root' args = [] if ad_hoc_command.job_type == 'check': args.append('--check') args.extend(['-u', sanitize_jinja(ssh_username)]) if 'ssh_password' in passwords: args.append('--ask-pass') # We only specify sudo/su user and password if explicitly given by the # credential. Credential should never specify both sudo and su. if ad_hoc_command.become_enabled: args.append('--become') if become_method: args.extend(['--become-method', sanitize_jinja(become_method)]) if become_username: args.extend(['--become-user', sanitize_jinja(become_username)]) if 'become_password' in passwords: args.append('--ask-become-pass') if ad_hoc_command.forks: # FIXME: Max limit? args.append('--forks=%d' % ad_hoc_command.forks) if ad_hoc_command.diff_mode: args.append('--diff') if ad_hoc_command.verbosity: args.append('-%s' % ('v' * min(5, ad_hoc_command.verbosity))) extra_vars = ad_hoc_command.awx_meta_vars() if ad_hoc_command.extra_vars_dict: redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict) if removed_vars: raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars))) extra_vars.update(ad_hoc_command.extra_vars_dict) if ad_hoc_command.limit: args.append(ad_hoc_command.limit) else: args.append('all') return args def build_extra_vars_file(self, ad_hoc_command, private_data_dir): extra_vars = ad_hoc_command.awx_meta_vars() if ad_hoc_command.extra_vars_dict: redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict) if removed_vars: raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars))) extra_vars.update(ad_hoc_command.extra_vars_dict) self._write_extra_vars_file(private_data_dir, extra_vars) def build_module_name(self, ad_hoc_command): return ad_hoc_command.module_name def build_module_args(self, ad_hoc_command): module_args = ad_hoc_command.module_args if settings.ALLOW_JINJA_IN_EXTRA_VARS != 'always': module_args = sanitize_jinja(module_args) return module_args def build_cwd(self, ad_hoc_command, private_data_dir): return private_data_dir def build_playbook_path_relative_to_cwd(self, job, private_data_dir): return None def get_password_prompts(self, passwords={}): d = super(RunAdHocCommand, self).get_password_prompts() d[r'Enter passphrase for .*:\s*?$'] = 'ssh_key_unlock' d[r'Bad passphrase, try again for .*:\s*?$'] = '' for method in PRIVILEGE_ESCALATION_METHODS: d[r'%s password.*:\s*?$' % (method[0])] = 'become_password' d[r'%s password.*:\s*?$' % (method[0].upper())] = 'become_password' d[r'BECOME password.*:\s*?$'] = 'become_password' d[r'SSH password:\s*?$'] = 'ssh_password' d[r'Password:\s*?$'] = 'ssh_password' return d @task(queue=get_local_queuename) class RunSystemJob(BaseTask): model = SystemJob event_model = SystemJobEvent event_data_key = 'system_job_id' def build_execution_environment_params(self, system_job, private_data_dir): return {} def build_args(self, system_job, 
private_data_dir, passwords): args = ['awx-manage', system_job.job_type] try: # System Job extra_vars can be blank, must be JSON if not blank if system_job.extra_vars == '': json_vars = {} else: json_vars = json.loads(system_job.extra_vars) if system_job.job_type in ('cleanup_jobs', 'cleanup_activitystream'): if 'days' in json_vars: args.extend(['--days', str(json_vars.get('days', 60))]) if 'dry_run' in json_vars and json_vars['dry_run']: args.extend(['--dry-run']) if system_job.job_type == 'cleanup_jobs': args.extend( ['--jobs', '--project-updates', '--inventory-updates', '--management-jobs', '--ad-hoc-commands', '--workflow-jobs', '--notifications'] ) except Exception: logger.exception("{} Failed to parse system job".format(system_job.log_format)) return args def write_args_file(self, private_data_dir, args): path = os.path.join(private_data_dir, 'args') handle = os.open(path, os.O_RDWR | os.O_CREAT, stat.S_IREAD | stat.S_IWRITE) f = os.fdopen(handle, 'w') f.write(' '.join(args)) f.close() os.chmod(path, stat.S_IRUSR) return path def build_env(self, instance, private_data_dir, private_data_files=None): base_env = super(RunSystemJob, self).build_env(instance, private_data_dir, private_data_files=private_data_files) # TODO: this is able to run by turning off isolation # the goal is to run it in a container instead env = dict(os.environ.items()) env.update(base_env) return env def build_cwd(self, instance, private_data_dir): return settings.BASE_DIR def build_playbook_path_relative_to_cwd(self, job, private_data_dir): return None def build_inventory(self, instance, private_data_dir): return None def _reconstruct_relationships(copy_mapping): for old_obj, new_obj in copy_mapping.items(): model = type(old_obj) for field_name in getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []): field = model._meta.get_field(field_name) if isinstance(field, ForeignKey): if getattr(new_obj, field_name, None): continue related_obj = getattr(old_obj, field_name) related_obj = copy_mapping.get(related_obj, related_obj) setattr(new_obj, field_name, related_obj) elif field.many_to_many: for related_obj in getattr(old_obj, field_name).all(): logger.debug('Deep copy: Adding {} to {}({}).{} relationship'.format(related_obj, new_obj, model, field_name)) getattr(new_obj, field_name).add(copy_mapping.get(related_obj, related_obj)) new_obj.save() @task(queue=get_local_queuename) def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, uuid, permission_check_func=None): sub_obj_list = cache.get(uuid) if sub_obj_list is None: logger.error('Deep copy {} from {} to {} failed unexpectedly.'.format(model_name, obj_pk, new_obj_pk)) return logger.debug('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk)) from awx.api.generics import CopyAPIView from awx.main.signals import disable_activity_stream model = getattr(importlib.import_module(model_module), model_name, None) if model is None: return try: obj = model.objects.get(pk=obj_pk) new_obj = model.objects.get(pk=new_obj_pk) creator = User.objects.get(pk=user_pk) except ObjectDoesNotExist: logger.warning("Object or user no longer exists.") return with transaction.atomic(), ignore_inventory_computed_fields(), disable_activity_stream(): copy_mapping = {} for sub_obj_setup in sub_obj_list: sub_model = getattr(importlib.import_module(sub_obj_setup[0]), sub_obj_setup[1], None) if sub_model is None: continue try: sub_obj = sub_model.objects.get(pk=sub_obj_setup[2]) except ObjectDoesNotExist: continue copy_mapping.update(CopyAPIView.copy_model_obj(obj,
new_obj, sub_model, sub_obj, creator)) _reconstruct_relationships(copy_mapping) if permission_check_func: permission_check_func = getattr(getattr(importlib.import_module(permission_check_func[0]), permission_check_func[1]), permission_check_func[2]) permission_check_func(creator, copy_mapping.values()) if isinstance(new_obj, Inventory): update_inventory_computed_fields.delay(new_obj.id) class AWXReceptorJob: def __init__(self, task=None, runner_params=None): self.task = task self.runner_params = runner_params self.unit_id = None if self.task and not self.task.instance.is_container_group_task: execution_environment_params = self.task.build_execution_environment_params(self.task.instance, runner_params['private_data_dir']) self.runner_params['settings'].update(execution_environment_params) def run(self): # We establish a connection to the Receptor socket receptor_ctl = ReceptorControl('/var/run/receptor/receptor.sock') try: return self._run_internal(receptor_ctl) finally: # Make sure to always release the work unit if we established it if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK: receptor_ctl.simple_command(f"work release {self.unit_id}") def _run_internal(self, receptor_ctl): # Create a socketpair. Where the left side will be used for writing our payload # (private data dir, kwargs). The right side will be passed to Receptor for # reading. sockin, sockout = socket.socketpair() threading.Thread(target=self.transmit, args=[sockin]).start() # submit our work, passing # in the right side of our socketpair for reading. result = receptor_ctl.submit_work(worktype=self.work_type, payload=sockout.makefile('rb'), params=self.receptor_params) self.unit_id = result['unitid'] sockin.close() sockout.close() resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id, return_socket=True, return_sockfile=True) # Both "processor" and "cancel_watcher" are spawned in separate threads. # We wait for the first one to return. If cancel_watcher returns first, # we yank the socket out from underneath the processor, which will cause it # to exit. A reference to the processor_future is passed into the cancel_watcher_future, # which exits if the job has finished normally. The context manager ensures we do not # leave any threads lying around. with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: processor_future = executor.submit(self.processor, resultfile) cancel_watcher_future = executor.submit(self.cancel_watcher, processor_future) futures = [processor_future, cancel_watcher_future] first_future = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_COMPLETED) res = list(first_future.done)[0].result() if res.status == 'canceled': receptor_ctl.simple_command(f"work cancel {self.unit_id}") resultsock.shutdown(socket.SHUT_RDWR) resultfile.close() elif res.status == 'error': # TODO: There should be a more efficient way of getting this information receptor_work_list = receptor_ctl.simple_command("work list") detail = receptor_work_list[self.unit_id]['Detail'] state_name = receptor_work_list[self.unit_id]['StateName'] if 'exceeded quota' in detail: logger.warning(detail) log_name = self.task.instance.log_format logger.warning(f"Could not launch pod for {log_name}. Exceeded quota.") self.task.update_model(self.task.instance.pk, status='pending') return # If ansible-runner ran, but an error occurred at runtime, the traceback information # is saved via the status_handler passed in to the processor.
if state_name == 'Succeeded': return res raise RuntimeError(detail) return res # Spawned in a thread so Receptor can start reading before we finish writing, we # write our payload to the left side of our socketpair. def transmit(self, _socket): if not settings.IS_K8S and self.work_type == 'local': self.runner_params['only_transmit_kwargs'] = True ansible_runner.interface.run(streamer='transmit', _output=_socket.makefile('wb'), **self.runner_params) # Socket must be shutdown here, or the reader will hang forever. _socket.shutdown(socket.SHUT_WR) def processor(self, resultfile): return ansible_runner.interface.run( streamer='process', quiet=True, _input=resultfile, event_handler=self.task.event_handler, finished_callback=self.task.finished_callback, status_handler=self.task.status_handler, **self.runner_params, ) @property def receptor_params(self): if self.task.instance.is_container_group_task: spec_yaml = yaml.dump(self.pod_definition, explicit_start=True) receptor_params = { "secret_kube_pod": spec_yaml, } if self.credential: kubeconfig_yaml = yaml.dump(self.kube_config, explicit_start=True) receptor_params["secret_kube_config"] = kubeconfig_yaml else: private_data_dir = self.runner_params['private_data_dir'] receptor_params = {"params": f"--private-data-dir={private_data_dir}"} return receptor_params @property def work_type(self): if self.task.instance.is_container_group_task: if self.credential: work_type = 'kubernetes-runtime-auth' else: work_type = 'kubernetes-incluster-auth' else: work_type = 'local' return work_type def cancel_watcher(self, processor_future): while True: if processor_future.done(): return processor_future.result() if self.task.cancel_callback(): result = namedtuple('result', ['status', 'rc']) return result('canceled', 1) if hasattr(self, 'unit_id') and 'RECEPTOR_UNIT_ID' not in self.task.instance.job_env: self.task.instance.job_env['RECEPTOR_UNIT_ID'] = self.unit_id self.task.update_model(self.task.instance.pk, job_env=self.task.instance.job_env) time.sleep(1) @property def pod_definition(self): if self.task: ee = self.task.instance.resolve_execution_environment() else: ee = get_default_execution_environment() default_pod_spec = get_default_pod_spec() pod_spec_override = {} if self.task and self.task.instance.instance_group.pod_spec_override: pod_spec_override = parse_yaml_or_json(self.task.instance.instance_group.pod_spec_override) pod_spec = {**default_pod_spec, **pod_spec_override} pod_spec['spec']['containers'][0]['image'] = ee.image pod_spec['spec']['containers'][0]['args'] = ['ansible-runner', 'worker', '--private-data-dir=/runner'] if self.task: pod_spec['metadata'] = deepmerge( pod_spec.get('metadata', {}), dict(name=self.pod_name, labels={'ansible-awx': settings.INSTALL_UUID, 'ansible-awx-job-id': str(self.task.instance.id)}), ) return pod_spec @property def pod_name(self): return f"automation-job-{self.task.instance.id}" @property def credential(self): return self.task.instance.instance_group.credential @property def namespace(self): return self.pod_definition['metadata']['namespace'] @property def kube_config(self): host_input = self.credential.get_input('host') config = { "apiVersion": "v1", "kind": "Config", "preferences": {}, "clusters": [{"name": host_input, "cluster": {"server": host_input}}], "users": [{"name": host_input, "user": {"token": self.credential.get_input('bearer_token')}}], "contexts": [{"name": host_input, "context": {"cluster": host_input, "user": host_input, "namespace": self.namespace}}], "current-context": host_input, } if 
self.credential.get_input('verify_ssl') and 'ssl_ca_cert' in self.credential.inputs: config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode( self.credential.get_input('ssl_ca_cert').encode() # encode to bytes ).decode() # decode the base64 data into a str else: config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True return config
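A minimal standalone sketch (an illustration, not part of the AWX source above) of the socketpair hand-off that AWXReceptorJob._run_internal and transmit rely on: one thread writes the payload into the left end of the pair and must shut down its write side, otherwise the reader blocks forever waiting for EOF, which is exactly why transmit() calls _socket.shutdown(socket.SHUT_WR).

import socket
import threading

left, right = socket.socketpair()

def writer():
    left.sendall(b"payload")
    # Without SHUT_WR the read() below never sees EOF and hangs forever.
    left.shutdown(socket.SHUT_WR)

threading.Thread(target=writer).start()
print(right.makefile('rb').read())  # b'payload'
left.close()
right.close()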
tcp_sink.py
import logging
import threading
import socketserver
import hdfs
import traceback


class TCPSink(object):
    def __init__(self, queue):
        self.queue = queue
        self.thread = threading.Thread(target=self.run, args=())
        self.thread.daemon = True
        self.thread.start()

    def run(self):
        queue = self.queue

        class TCPSinkHandler(socketserver.StreamRequestHandler):
            def handle(self):
                logging.info("TCPSink: Spark connected")
                try:
                    while True:
                        logging.info("TCPSink: waiting for chunk in queue ...")
                        data = queue.get()
                        if data is None:
                            logging.info("TCPSink: done")
                            queue.task_done()
                            break
                        chunk_size = len(data)
                        logging.info("TCPSink: sending chunk of %s bytes to Spark ..." % chunk_size)
                        self.wfile.write(("%s\n" % chunk_size).encode())
                        self.wfile.write(data)
                        self.wfile.flush()
                        queue.task_done()
                except:
                    err_msg = traceback.format_exc()
                    logging.error("TCPSink: error sending data:\n%s" % err_msg)
                finally:
                    logging.info("TCPSink: Spark disconnected")

        with socketserver.TCPServer(('0.0.0.0', 8890), TCPSinkHandler) as server:
            server.serve_forever()
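A hedged usage sketch for the sink above (the chunk contents are invented for illustration): the producer owns the queue, TCPSink serves it on port 8890 in a daemon thread, each chunk goes over the wire as a decimal length line followed by the raw bytes, and a None sentinel ends the stream.

import queue

q = queue.Queue()
sink = TCPSink(q)       # binds 0.0.0.0:8890 and waits for a consumer (Spark) to connect
q.put(b"first chunk")   # framed on the wire as b"11\n" + b"first chunk"
q.put(b"second chunk")
q.put(None)             # sentinel: handler logs "TCPSink: done" and stops reading
q.join()                # blocks until a connected consumer has drained every item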
server.py
from flask import Flask, render_template
from flask import jsonify
from threading import Thread
import threading
import time
import datetime
import shotgunEventDaemon as sgED
import logging
import queue  # Python 3 name of the old Python 2 "Queue" module
import json

# App instance
app = Flask(__name__)

thread = None
running = True

# Debug mode, with auto-reload
app.debug = True

plugcollections = []
counter = 0


class EngineCli(sgED.Engine):
    def __init__(self, configPath):
        super(EngineCli, self).__init__(configPath)
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter("%(levelname)s:%(name)s:%(message)s"))
        logging.getLogger().addHandler(handler)
        self.setLogger(logging.getLogger())


engine = EngineCli("shotgunEventDaemon.conf")

# Get The EventLogEntry ID
#global sg
"""
sgConnection = sg.Shotgun(engine.config.getShotgunURL(), engine.config.getEngineScriptName(), engine.config.getEngineScriptKey(), http_proxy=engine.config.getEngineProxyServer())
event = sgConnection.find_one("EventLogEntry", [["id", "is", int(id)]], fields=[ 'id', 'event_type', 'attribute_name', 'meta', 'entity', 'user', 'project', 'session_uuid', 'created_at'])
"""
event = {'id': 7806, 'event_type': 'Shotgun_Entity_Change', 'attribute_name': 'sg_status_list'}

# Initialise the plugin.
plugcollections = [sgED.PluginCollection(engine, s) for s in engine.config.getPluginPaths()]
for plugc in plugcollections:
    plugc.load()

q = queue.LifoQueue()


class MyThread(Thread):
    def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
        super(MyThread, self).__init__(group, target, name, args, kwargs)
        self.counter = 0
        print("Create Thread")


def worker_task():
    global running
    global thread
    global event
    global plugcollections
    print("Thread started")
    while running:
        """ Put blocking code that might take longer to execute here """
        time.sleep(1)
        #try:
        """
        for plugc in plugcollections:
            plugc.process(event)
        """
        thread.counter = thread.counter + 1
        print(event)
        for plugc in plugcollections:
            print(plugc)
            plugc.process(event)
        q.put(thread.counter)
        #t = threading.currentThread()
        #print(thread.counter)
        #print(t.counter)
        #except Exception as e:
        #    print("Exception %s" % e)
        print("Thread...")
    print("Thread stopped")


@app.route("/")
def index():
    return render_template('simple.html', name=q.get())


if __name__ == "__main__":
    thread = MyThread(target=worker_task)
    thread.start()
    app.run()
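A small aside on the queue choice above, as a runnable sketch: queue.LifoQueue hands back the most recently produced item first, so the "/" route renders the worker's freshest counter value rather than the oldest one still sitting in the queue.

import queue

q = queue.LifoQueue()
for value in (1, 2, 3):
    q.put(value)
print(q.get())  # 3 -- LIFO: newest first; a plain queue.Queue would return 1 here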
explorations.py
# pylint: disable=wrong-import-order, wrong-import-position, too-many-nested-blocks # Imports: standard library import os import csv import copy import logging import argparse import datetime import multiprocessing as mp from typing import Any, Dict, List, Tuple, Union, Optional from functools import reduce from collections import OrderedDict, defaultdict # Imports: third party import h5py import numpy as np import pandas as pd import seaborn as sns # Imports: first party from ml4c3.plots import SUBPLOT_SIZE from ml4c3.datasets import ( make_dataset, tensors_to_sources, get_train_valid_test_ids, get_dicts_of_arrays_from_dataset, ) from tensormap.TensorMap import ( TensorMap, PatientData, Interpretation, update_tmaps, binary_channel_map, find_negative_label_and_channel, ) # fmt: off # need matplotlib -> Agg -> pyplot import matplotlib # isort:skip matplotlib.use("Agg") # isort:skip from matplotlib import pyplot as plt # isort:skip # fmt: on def explore( args: argparse.Namespace, disable_saving_output: bool = False, ) -> pd.DataFrame: args.batch_size = 256 cohort_counts: Dict[str, Any] = OrderedDict() src_path = args.tensors src_name = args.source_name src_join = args.join_tensors src_cols = None if args.join_tensors is None else list(src_join) src_time = args.time_tensor ref_path = args.reference_tensors ref_name = args.reference_name ref_join = args.reference_join_tensors ref_cols = None if args.reference_join_tensors is None else list(ref_join) ref_start = args.reference_start_time_tensor ref_end = args.reference_end_time_tensor number_per_window = args.number_per_window order_in_window = args.order_in_window windows = args.window_name match_exact_window = order_in_window is not None match_min_window = not match_exact_window match_any_window = args.match_any_window match_every_window = not match_any_window if (args.reference_join_tensors is not None) and ( args.explore_stratify_label is not None ): ref_cols.append(args.explore_stratify_label) # Ensure all required tmaps are parsed and modified as needed tmap_names_to_get = [] tmap_names_to_get += [] if src_join is None else src_join tmap_names_to_get += [] if src_time is None else [src_time] tmaps = {} for tmap_name in tmap_names_to_get: tmaps = update_tmaps(tmap_name=tmap_name, tmaps=tmaps) tmaps = args.tensor_maps_in + [tmaps[tmap_name] for tmap_name in tmap_names_to_get] required_tmaps: List[TensorMap] = [] for tmap in tmaps: if _tmap_requires_modification_for_explore(tmap=tmap): tmap = _modify_tmap_to_return_mean(tmap=tmap) required_tmaps.append(tmap) if isinstance(args.tensors, list) and len(args.tensors) > 1: df = _tensors_to_df_with_dataset( tensors=args.tensors, patient_csv=args.patient_csv, mrn_column_name=args.mrn_column_name, tensor_maps_in=required_tmaps, num_workers=args.num_workers, batch_size=args.batch_size, ) else: df = _tensors_to_df( tensor_maps_in=required_tmaps, tensors=args.tensors, num_workers=args.num_workers, patient_csv=args.patient_csv, mrn_column_name=args.mrn_column_name, valid_ratio=args.valid_ratio, test_ratio=args.test_ratio, train_csv=args.train_csv, valid_csv=args.valid_csv, test_csv=args.test_csv, output_folder=args.output_folder, export_error=args.explore_export_error, export_fpath=args.explore_export_fpath, export_generator=args.explore_export_generator, ) # Remove redundant columns for binary labels from df df = _remove_redundant_cols(tmaps=required_tmaps, df=df) # If time windows are specified, extend reference columns use_time = not any(arg is None for arg in [src_time, ref_start, ref_end]) if 
use_time: if len(ref_start) != len(ref_end): raise ValueError( f"Invalid time windows, got {len(ref_start)} starts and {len(ref_end)}" " ends", ) if order_in_window is None: # If not matching exactly N in time window, order_in_window is None # make array of blanks so zip doesnt break later order_in_window = [""] * len(ref_start) elif len(order_in_window) != len(ref_start): raise ValueError( f"Ambiguous time selection in time windows, got {len(order_in_window)}" f" order_in_window for {len(ref_start)} windows", ) if windows is None: windows = [str(i) for i in range(len(ref_start))] elif len(windows) != len(ref_start): raise ValueError( f"Ambiguous time window names, got {len(windows)} names for" f" {len(ref_start)} windows", ) # Ref start and end are lists of lists, defining time windows time_windows = list(zip(ref_start, ref_end)) # Each time window is defined by a tuples of two lists, # where the first list of each tuple defines the start point of the time window # and the second list of each tuple defines the end point of the time window for start, end in time_windows: # Each start/end point list is two elements, # where the first element in the list is the name of the time tensor # and the second element is the offset to the value of the time tensor # Add day offset of 0 start.append(0) end.append(0) # parse day offset as int start[1] = int(start[1]) end[1] = int(end[1]) # Add unique column names to ref_cols ref_cols.extend(_cols_from_time_windows(time_windows)) # If reference_tensors are given, perform cross-reference functionality if args.reference_tensors is not None: # If path to reference tensors is dir, parse HD5 files if os.path.isdir(args.reference_tensors): ref_tmaps_dict: Dict[str, TensorMap] = {} for tm_name in ref_cols: ref_tmaps_dict = update_tmaps(tm_name, ref_tmaps_dict) ref_tmaps = [ref_tmaps_dict[tm_name] for tm_name in ref_cols] df_ref = _tensors_to_df( tensor_maps_in=ref_tmaps, tensors=args.reference_tensors, num_workers=args.num_workers, patient_csv=args.patient_csv, valid_ratio=args.valid_ratio, test_ratio=args.test_ratio, train_csv=args.train_csv, valid_csv=args.valid_csv, test_csv=args.test_csv, output_folder=args.output_folder, export_error=args.explore_export_error, export_fpath=args.explore_export_fpath, export_generator=args.explore_export_generator, ) # Else, path to reference tensors is file (assume CSV) else: df_ref = pd.read_csv( filepath_or_buffer=args.reference_tensors, usecols=ref_cols, ) # Remove rows in df with NaNs for src_join, or type casting fails df.dropna(subset=src_join, inplace=True) df_ref.dropna(subset=ref_join, inplace=True) # Cast source column to ref column type df[src_join] = df[src_join].astype(df_ref[ref_join].dtypes[0]) if use_time: src_cols.append(src_time) # Count total and unique entries in df: source cohort_counts = _update_cohort_counts_len_and_unique( cohort_counts=cohort_counts, df=df, name=src_name, join_col=src_join, ) # Count total and unique entries in df: reference cohort_counts = _update_cohort_counts_len_and_unique( cohort_counts=cohort_counts, df=df_ref, name=ref_name, join_col=ref_join, ) # Format datetime cols and remove nan rows if use_time: df[src_time] = pd.to_datetime( df[src_time], errors="coerce", infer_datetime_format=True, ) df.dropna(subset=[src_time], inplace=True) for ref_time in _cols_from_time_windows(time_windows): df_ref[ref_time] = pd.to_datetime( df_ref[ref_time], errors="coerce", infer_datetime_format=True, ) df_ref.dropna(subset=_cols_from_time_windows(time_windows), inplace=True) # Iterate through 
time_windows, add day offsets to start and end strings, # update reference columns, and create new date columns with offsets in # reference dataframe time_windows_parsed = [] for start, end in time_windows: # Start time name, days = start[0], start[1] start_name_offset = _offset_ref_name(name=name, days=days) ref_cols = _offset_ref_cols(name=name, days=days, ref_cols=ref_cols) df_ref = _offset_ref_df( name=name, days=days, name_offset=start_name_offset, df=df_ref, ) # End time name, days = end[0], end[1] end_name_offset = _offset_ref_name(name=name, days=days) ref_cols = _offset_ref_cols(name=name, days=days, ref_cols=ref_cols) df_ref = _offset_ref_df( name=name, days=days, name_offset=end_name_offset, df=df_ref, ) # Append list with tuple of offset start and end names time_windows_parsed.append((start_name_offset, end_name_offset)) logging.info("Formatted datetime columns and removed unparsable rows") # Intersect with input tensors df on specified keys df_cross = df.merge( df_ref, how="inner", left_on=src_join, right_on=ref_join, ).sort_values(src_cols) logging.info("Cross-referenced using src and ref join tensors") # Calculate cohort counts on crossed dataframe cohort_counts = _update_cohort_counts_crossed_dataframe( cohort_counts=cohort_counts, df=df_cross, src_name=src_name, ref_name=ref_name, src_cols=src_cols, ref_cols=ref_cols, src_join=src_join, ref_join=ref_join, ) # If reference_tensor given, we have cross-referenced df # if not, just have source df # Select time subsets and generate subset dfs # (info either in source tensors, or reference_tensors) if use_time: # Get list of dfs with >=1 occurrence per time window dfs_cross_window = _get_df_per_window( df=df_cross, src_time=src_time, windows=time_windows_parsed, ) # Get list of dfs with >=N hits in any time window dfs_n_or_more_hits_any_window = _get_df_n_or_more_hits_any_window( dfs=dfs_cross_window, windows=time_windows_parsed, src_join=src_join, num_per_window=args.number_per_window, ) # --------------------------------------------------------------------------- # # Scenario 1: match_min_window and match_any_window # --------------------------------------------------------------------------- # if match_min_window and match_any_window: df_aggregated = _aggregate_time_windows( dfs=dfs_n_or_more_hits_any_window, windows=windows, src_cols=src_cols, ) logging.info( f"Cross-referenced so event occurs {number_per_window}+ times in" " any time window", ) title = f"{number_per_window}+ in any window" # --------------------------------------------------------------------------- # # Scenario 2: match_min_window and match_every_window # --------------------------------------------------------------------------- # # Given list of dfs with >=N occurrences in any time window, # isolate rows that have join_tensors across all windows if match_min_window and match_every_window: dfs = _intersect_time_windows( dfs=dfs_n_or_more_hits_any_window, src_join=src_join, ) df_aggregated = _aggregate_time_windows( dfs=dfs, windows=windows, src_cols=src_cols, ) logging.info( f"Cross-referenced so unique event occurs {number_per_window}+ times in" " all windows", ) title = f"{number_per_window}+ in every window" # --------------------------------------------------------------------------- # # Scenario 3: match_exact_window # --------------------------------------------------------------------------- # # Get exactly N occurrences in any time window if match_exact_window: dfs = [ _get_df_exactly_n_any_window( df=_df, order=order, start=start, end=end, 
src_join=src_join, number_per_window=number_per_window, ) for _df, order, (start, end) in zip( dfs_n_or_more_hits_any_window, order_in_window, time_windows_parsed, ) ] # What do we do if match_exact_window, but not match_any_window? # ? if match_exact_window and match_any_window: df_aggregated = _aggregate_time_windows( dfs=dfs, windows=windows, src_cols=src_cols, ) logging.info( "Cross-referenced so unique event occurs exactly " f"{number_per_window} times in any window", ) title = f"{number_per_window} in any window" # Add aggregated df to list of window dfs and # adjust title depending on cross-reference and time windowing if args.reference_tensors is None: title = "all" dfs = [df] windows = ["all"] else: if use_time: title = title.replace(" ", "-") dfs.append(df_aggregated) windows.append("all_windows") else: title = "crossref" dfs = [df_cross] windows = ["all"] # Iterate through time window names and dataframes for window, df_window in zip(windows, dfs): if args.explore_stratify_label is not None: if not disable_saving_output: _save_label_distribution( df=df_window, title=title, window=window, stratify_label=args.explore_stratify_label, output_folder=args.output_folder, ) # Calculate cross-referenced cohort counts cohort_counts = _update_cohort_counts( cohort_counts=cohort_counts, df=df_window, src_name=src_name, src_cols=src_cols, src_join=src_join, window=window, title=title, ) # Iterate over union and intersect of df and calculate summary statistics for _df, union_or_intersect in zip( [df_window, df_window.dropna()], ["union", "intersect"], ): # Get labels from dataframe of tensors labels = _get_labels( df=_df, explore_stratify_label=args.explore_stratify_label, ) # Iterate through interpretations for interpretation in [ Interpretation.CONTINUOUS, Interpretation.CATEGORICAL, Interpretation.LANGUAGE, Interpretation.EVENT, ]: # Initialize list of summary stats dicts for this interpretation stats_all = [] stats_keys = [] # Iterate over required tmaps for that interpretation for tm in [ tm for tm in required_tmaps if tm.interpretation is interpretation ]: # Plot continuous histograms of tensors for union if (union_or_intersect == "union") and ( interpretation is Interpretation.CONTINUOUS ): if not disable_saving_output: names = [tm.name] if tm.channel_map: names = [f"{tm.name}_{cm}" for cm in tm.channel_map] for name in names: _plot_histogram_continuous_tensor( tmap_name=name, df=_df, output_folder=args.output_folder, window=window, stratify_label=args.explore_stratify_label, image_ext=args.image_ext, ) # Iterate over label and isolate those df rows if stratified for label in labels: if isinstance(label, float) and np.isnan(label): continue df_label = ( _df if label == "all" else _df[_df[args.explore_stratify_label] == label] ) key_suffix = ( "" if label == "all" else f"_{args.explore_stratify_label}={label}" ) # Calculate summary statistics for that tmap if tm.channel_map: # If the channel map is binary, we only want to parse the # positive label and ignore the redundant negative label if binary_channel_map(tm=tm): key = tm.name stats = _calculate_summary_stats( df=df_label, key=key, interpretation=interpretation, ) stats_all.append(stats) stats_keys.append(f"{tm.name}{key_suffix}") # If not binary, parse each channel map else: for cm in tm.channel_map: key = f"{tm.name}_{cm}" stats = _calculate_summary_stats( df=df_label, key=key, interpretation=interpretation, ) stats_all.append(stats) stats_keys.append(f"{tm.name}_{cm}{key_suffix}") else: key = tm.name stats = 
_calculate_summary_stats( df=df_label, key=key, interpretation=interpretation, ) stats_all.append(stats) stats_keys.append(f"{tm.name}{key_suffix}") # Convert summary statistics into dataframe and save to CSV if not disable_saving_output and len(stats_all) > 0: df_stats = pd.DataFrame(data=stats_all, index=[stats_keys]) fpath = os.path.join( args.output_folder, f"stats_{interpretation}_{window}_{union_or_intersect}.csv", ) df_stats.round(3).to_csv(fpath) logging.info( f"{window} / {union_or_intersect} / {interpretation} tmaps: " f"saved summary stats to {fpath}", ) # Save tensors, including column with window name fpath = os.path.join(args.output_folder, "tensors_union.csv") # Time-windowed if use_time: if not disable_saving_output: df_aggregated.set_index(src_join, drop=True).to_csv(fpath) return_df = df_aggregated else: # No cross-reference if args.reference_tensors is None: if not disable_saving_output: df.to_csv(fpath, index=False) return_df = df # Cross-reference else: if not disable_saving_output: df_cross.set_index(src_join, drop=True).to_csv(fpath) return_df = df_cross if not disable_saving_output: logging.info(f"Saved tensors to {fpath}") # Save cohort counts to CSV fpath = os.path.join(args.output_folder, "cohort_counts.csv") df_cohort_counts = pd.DataFrame.from_dict( cohort_counts, orient="index", columns=["count"], ) df_cohort_counts = df_cohort_counts.rename_axis("description") if not disable_saving_output: df_cohort_counts.to_csv(fpath) logging.info(f"Saved cohort counts to {fpath}") return return_df def _remove_redundant_cols(tmaps: List[TensorMap], df: pd.DataFrame) -> pd.DataFrame: """Given a list of tensor maps, and a dataframe with tensors, checks categorical # tensor maps, parses binary tmaps to find the negative label, removes it from the dataframe, and simplifies the remaining positive column name""" if Interpretation.CATEGORICAL in [tm.interpretation for tm in tmaps]: for tm in [ tm for tm in tmaps if tm.interpretation is Interpretation.CATEGORICAL ]: # If two-element channel map, with "no_" in it, it is probably binary if binary_channel_map(tm=tm): # Find name of redundant label so we can drop it negative_label, _ = find_negative_label_and_channel(tm.channel_map) df.drop(f"{tm.name}_{negative_label}", axis=1, inplace=True) # Find name of non-redundant label positive_label = [ cm for cm in tm.channel_map if cm is not negative_label ][0] # Simplify name of positive label column df.rename( columns={f"{tm.name}_{positive_label}": f"{tm.name}"}, inplace=True, ) return df def _get_labels(df: pd.DataFrame, explore_stratify_label: str) -> list: """ Get list of unique non-NaN labels from dataframe """ labels = ["all"] if explore_stratify_label is not None: labels.extend( [ label for label in df[explore_stratify_label].unique() if not np.isnan(label) ], ) return labels def _plot_histogram_continuous_tensor( tmap_name: str, df: pd.DataFrame, output_folder: str, window: str, stratify_label: str, image_ext: str, ): sns.set_context("talk") plot_width = SUBPLOT_SIZE * 1.3 if stratify_label is not None else SUBPLOT_SIZE plot_height = SUBPLOT_SIZE * 0.6 fig = plt.figure(figsize=(plot_width, plot_height)) ax = plt.gca() plt.title(f"{tmap_name}: n={len(df)}") # Iterate through unique values of stratify label if stratify_label is not None: for stratify_label_value in df[stratify_label].unique(): n = sum(df[stratify_label] == stratify_label_value) legend_str = f"{stratify_label}={stratify_label_value} (n={n})" data = df[df[stratify_label] == stratify_label_value][tmap_name] kde = not 
np.isclose(data.var(), 0) if kde: sns.kdeplot(data, label=legend_str) sns.histplot(data, label=legend_str, stat="density") else: sns.displot(data, label=legend_str) plt.xlabel("Value") box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) ax.legend(frameon=False, loc="center left", bbox_to_anchor=(1, 0.5)) else: data = df[tmap_name].to_numpy() kde = not np.isclose(data.var(), 0) if kde: sns.kdeplot(data) sns.histplot(data, stat="density") else: sns.displot(data, kde=kde) plt.xlabel("Value") fpath = os.path.join( output_folder, f"histogram_{tmap_name}_{window}{image_ext}", ) plt.savefig(fpath, dpi=150, bbox_inches="tight") plt.close() logging.info(f"Saved histogram of {tmap_name} to {fpath}") def _calculate_summary_stats(df: pd.DataFrame, key: str, interpretation) -> dict: stats = dict() if interpretation is Interpretation.CONTINUOUS: stats["min"] = df[key].min() stats["max"] = df[key].max() stats["mean"] = df[key].mean() stats["median"] = df[key].median() mode = df[key].mode() stats["mode"] = mode[0] if len(mode) != 0 else np.nan stats["variance"] = df[key].var() stats["stdev"] = df[key].std() stats["count"] = df[key].count() stats["missing"] = df[key].isna().sum() stats["total"] = len(df[key]) stats["missing_fraction"] = stats["missing"] / stats["total"] elif interpretation is Interpretation.CATEGORICAL: num_key = np.count_nonzero(df[key] == 1) stats["count"] = num_key stats["fraction"] = num_key / len(df[key]) stats["total"] = len(df[key]) elif interpretation is Interpretation.LANGUAGE: stats["count"] = df[key].count() stats["count_fraction"] = stats["count"] / df[key].shape[0] if stats["count"] == 0: stats["count_unique"] = 0 else: stats["count_unique"] = len(df[key].value_counts()) stats["missing"] = df[key].isna().sum() stats["missing_fraction"] = stats["missing"] / len(df[key]) stats["total"] = len(df[key]) elif interpretation is Interpretation.EVENT: stats["min"] = df[key].min() stats["max"] = df[key].max() stats["count"] = df[key].count() stats["count_fraction"] = stats["count"] / df[key].shape[0] stats["missing"] = df[key].isna().sum() stats["missing_fraction"] = stats["missing"] / len(df[key]) stats["total"] = len(df[key]) else: raise ValueError(f"Invalid interpretation: {interpretation}") return stats def _get_df_exactly_n_any_window( df: pd.DataFrame, order: str, start: str, end: str, src_join: List[str], number_per_window: int, ) -> pd.DataFrame: if order == "newest": df = df.groupby(src_join + [start, end]).tail(number_per_window) elif order == "oldest": df = df.groupby(src_join + [start, end]).head(number_per_window) elif order == "random": df = df.groupby(src_join + [start, end]).apply( lambda g: g.sample(number_per_window), ) else: raise NotImplementedError( f"Ordering for which rows to use in time window unknown: '{order}'", ) return df.reset_index(drop=True) def _update_cohort_counts( cohort_counts: dict, df: pd.DataFrame, src_name: str, src_cols: List, src_join: str, window: str, title: str, ) -> dict: cohort_counts[f"{window}: {src_name}"] = len(df) cohort_counts[f"{window}: {src_name} (unique by {src_cols}) / {title}"] = len( df.drop_duplicates(subset=src_cols), ) logging.info( f"Updated cross-reference counts for window ({window}) and title ({title})", ) return cohort_counts def _save_label_distribution( df: pd.DataFrame, title: str, window: str, stratify_label: str, output_folder: str, ): # Get counts for each value of stratify_label in df label_counts = df[stratify_label].value_counts(dropna=False).to_dict() labels = 
[f"{stratify_label}={label}" for label in label_counts.keys()] labels.append("all") counts = [count for count in label_counts.values()] counts.append(df.shape[0]) fractions = [count / df.shape[0] for count in counts] # Combine lists into dataframe df_label_distribution = pd.DataFrame( data=[counts, fractions], index=["count", "fraction"], columns=labels, ).T fpath = os.path.join( output_folder, f"label_distribution_{title}_{window}.csv", ) df_label_distribution.round(3).to_csv(fpath) logging.info(f"Saved {fpath}") def _intersect_time_windows( dfs: List[pd.DataFrame], src_join: list, ) -> List[pd.DataFrame]: for i, df in enumerate(dfs): if i == 0: intersect = df[src_join].drop_duplicates() else: intersect = intersect.merge(df[src_join].drop_duplicates()) join_tensor_intersect = reduce( lambda a, b: a.merge(b), [pd.DataFrame(df[src_join].drop_duplicates()) for df in dfs], ) # Filter list of dfs to only rows with join_tensors across all windows dfs_intersect = [df.merge(join_tensor_intersect) for df in dfs] return dfs_intersect def _aggregate_time_windows( dfs: List[pd.DataFrame], windows: list, src_cols: list, ) -> pd.DataFrame: """ Aggregate list of dataframes (one per time window) back into one dataframe with column indicating the time window index """ # Add time window column and value to each df in list for df, window in zip(dfs, windows): if "time_window" not in df: df["time_window"] = window # Concatenate dfs back together df_together = pd.concat(dfs, ignore_index=True).sort_values( by=src_cols + ["time_window"], ignore_index=True, ) return df_together def _get_df_per_window( df: pd.DataFrame, windows: List[Tuple], src_time: str, ) -> List[pd.DataFrame]: return [ df[(df[start] < df[src_time]) & (df[src_time] < df[end])] for start, end in windows ] def _get_df_n_or_more_hits_any_window( dfs: List[pd.DataFrame], windows: List[Tuple[str, str]], src_join: str, num_per_window: int, ) -> List[pd.DataFrame]: return [ df.groupby(src_join + [start, end]).filter(lambda g: len(g) >= num_per_window) for df, (start, end) in zip(dfs, windows) ] def _update_cohort_counts_len_and_unique( cohort_counts: dict, df: pd.DataFrame, name: str, join_col: str, ) -> dict: cohort_counts[f"{name} (total rows)"] = len(df) cohort_counts[f'{name} (unique {" + ".join(join_col)})'] = len( df.drop_duplicates(subset=join_col), ) return cohort_counts def _update_cohort_counts_crossed_dataframe( cohort_counts: dict, df: pd.DataFrame, src_name: str, ref_name: str, src_cols: list, ref_cols: list, src_join: str, ref_join: str, ) -> dict: cohort_counts[f'{src_name} in {ref_name} (unique {" + ".join(src_cols)})'] = len( df.drop_duplicates(subset=src_cols), ) cohort_counts[f'{src_name} in {ref_name} (unique {" + ".join(src_join)})'] = len( df.drop_duplicates(subset=src_join), ) cohort_counts[f"{ref_name} in {src_name} (unique joins + times + labels)"] = len( df.drop_duplicates(subset=ref_cols), ) cohort_counts[f'{ref_name} in {src_name} (unique {" + ".join(ref_join)})'] = len( df.drop_duplicates(subset=ref_join), ) return cohort_counts def _offset_ref_name(name: str, days: int) -> str: if days == 0: return name name = f"{name}_{days:+}_days" return name def _offset_ref_cols(name: str, days: int, ref_cols: list) -> list: if days == 0: return ref_cols ref_cols.append(f"{name}_{days:+}_days") return ref_cols def _offset_ref_df( name: str, days: int, name_offset: str, df: pd.DataFrame, ) -> pd.DataFrame: if name_offset not in df: df[name_offset] = df[name].apply(lambda x: x + datetime.timedelta(days=days)) return df class 
TensorsToDataFrameParallelWrapper: def __init__( self, tmaps, paths, num_workers, output_folder, export_error, export_fpath, export_generator, ): self.tmaps = tmaps self.paths = paths self.num_workers = num_workers self.total = len(paths) self.output_folder = output_folder self.export_error = export_error self.export_fpath = export_fpath self.export_generator = export_generator self.chunksize = self.total // num_workers self.counter = mp.Value("l", 1) def _hd5_to_disk(self, path, gen_name): with self.counter.get_lock(): i = self.counter.value if i % 1000 == 0: logging.info(f"Parsing {i}/{self.total} ({i/self.total*100:.1f}%) done") self.counter.value += 1 # each worker should write to it's own file pid = mp.current_process().pid fpath = os.path.join( self.output_folder, f"tensors_all_union_{pid}.csv", ) write_header = not os.path.isfile(fpath) try: with h5py.File(path, "r") as hd5: dict_of_tensor_dicts = defaultdict(dict) for tm in self.tmaps: shape = tm.shape try: tensors = tm.tensor_from_file(tm, hd5) if tm.time_series_limit is None: tensors = np.array([tensors]) for i, tensor in enumerate(tensors): if tensor is None: break try: tensor = tm.postprocess_tensor( tensor, augment=False, data=hd5, ) if tm.channel_map: for cm in tm.channel_map: dict_of_tensor_dicts[i][ f"{tm.name}_{cm}" ] = tensor[tm.channel_map[cm]] else: # If tensor is a scalar, isolate value in array; # otherwise, retain value as array if shape[0] == 1: if isinstance(tensor, np.ndarray): tensor = tensor.item() dict_of_tensor_dicts[i][tm.name] = tensor except ( IndexError, KeyError, ValueError, OSError, RuntimeError, ) as e: if tm.channel_map: for cm in tm.channel_map: dict_of_tensor_dicts[i][ f"{tm.name}_{cm}" ] = np.nan else: dict_of_tensor_dicts[i][tm.name] = np.full( shape, np.nan, )[0] if self.export_error: dict_of_tensor_dicts[i][f"error_type_{tm.name}"] = type( e, ).__name__ except ( IndexError, KeyError, ValueError, OSError, RuntimeError, ) as e: # Most likely error came from tensor_from_file and # dict_of_tensor_dicts is empty if tm.channel_map: for cm in tm.channel_map: dict_of_tensor_dicts[0][f"{tm.name}_{cm}"] = np.nan else: dict_of_tensor_dicts[0][tm.name] = np.full(shape, np.nan)[0] if self.export_error: dict_of_tensor_dicts[0][f"error_type_{tm.name}"] = type( e, ).__name__ for i in dict_of_tensor_dicts: if self.export_fpath: dict_of_tensor_dicts[i]["fpath"] = path if self.export_generator: dict_of_tensor_dicts[i]["generator"] = gen_name # write tdicts to disk if len(dict_of_tensor_dicts) > 0: keys = dict_of_tensor_dicts[0].keys() with open(fpath, "a") as output_file: dict_writer = csv.DictWriter(output_file, keys) if write_header: dict_writer.writeheader() dict_writer.writerows(dict_of_tensor_dicts.values()) except OSError as e: logging.info(f"OSError {e}") def mp_worker(self, worker_idx): start = worker_idx * self.chunksize end = start + self.chunksize if worker_idx == self.num_workers - 1: end = self.total for path, gen in self.paths[start:end]: self._hd5_to_disk(path, gen) def run(self): workers = [] for i in range(self.num_workers): worker = mp.Process(target=self.mp_worker, args=(i,)) worker.start() workers.append(worker) for worker in workers: worker.join() def _tensors_to_df_with_dataset( tensors: Union[str, List[Union[str, Tuple[str, str]]]], patient_csv: str, tensor_maps_in: List[TensorMap], num_workers: int, batch_size: int, mrn_column_name: Optional[str] = None, ): patient_ids, _, _ = get_train_valid_test_ids( tensors=tensors, mrn_column_name=mrn_column_name, patient_csv=patient_csv, valid_ratio=0, 
test_ratio=0, allow_empty_split=True, ) hd5_sources, csv_sources = tensors_to_sources(tensors, tensor_maps_in) dataset, stats, cleanup = make_dataset( data_split="explore", hd5_sources=hd5_sources, csv_sources=csv_sources, patient_ids=patient_ids, input_tmaps=tensor_maps_in, output_tmaps=[], batch_size=batch_size, num_workers=num_workers, cache=False, augment=False, validate=True, normalize=True, keep_ids=True, verbose=False, return_nan=True, ) data, _, patient_ids = get_dicts_of_arrays_from_dataset(dataset) logging.info(f"Extracted {len(data[tensor_maps_in[0].input_name])} tensors") cleanup() df = pd.DataFrame() df["patientid"] = patient_ids for tm in tensor_maps_in: tensor = data[tm.input_name] if tm.is_language: tensor = tensor.astype(str) if tm.channel_map is not None: for cm, idx in tm.channel_map.items(): df[f"{tm.name}_{cm}"] = tensor[:, idx] else: df[tm.name] = tensor[:, 0] logging.info("Reorganized tensors into dataframe") # Dataset should return tensors for a single patient in order, however tensors for # many patients may be interleaved. Stable sort by patientid groups tensors for # patients together and preserves order returned. df = df.sort_values("patientid", kind="mergesort") logging.info("Sorted tensors by patient ID") return df def _tensors_to_df( tensor_maps_in: List[TensorMap], tensors: str, num_workers: int, patient_csv: str = None, mrn_column_name: Optional[str] = None, valid_ratio: float = None, test_ratio: float = None, train_csv: str = None, valid_csv: str = None, test_csv: str = None, output_folder: str = "", export_error: bool = False, export_fpath: bool = False, export_generator: bool = False, ) -> pd.DataFrame: """ Create generators, load TMaps, call run method of class that parses tensors from HD5 files using TMaps and saves temporary CSVs, set dtypes, consolidate CSVs into single dataframe, and return dataframe. 
""" logging.info("Building generators for specified tensors") train_ids, valid_ids, test_ids = get_train_valid_test_ids( tensors=tensors, patient_csv=patient_csv, mrn_column_name=mrn_column_name, valid_ratio=valid_ratio, test_ratio=test_ratio, train_csv=train_csv, valid_csv=valid_csv, test_csv=test_csv, allow_empty_split=True, ) train_paths = [ os.path.join(tensors, f"{patient_id}.hd5") for patient_id in train_ids ] valid_paths = [ os.path.join(tensors, f"{patient_id}.hd5") for patient_id in valid_ids ] test_paths = [os.path.join(tensors, f"{patient_id}.hd5") for patient_id in test_ids] paths: List[Tuple[str, str]] = [] paths.extend(zip(train_paths, ["train"] * len(train_paths))) paths.extend(zip(valid_paths, ["valid"] * len(valid_paths))) paths.extend(zip(test_paths, ["test"] * len(test_paths))) tmaps = [tm for tm in tensor_maps_in] TensorsToDataFrameParallelWrapper( tmaps=tmaps, paths=paths, num_workers=num_workers, output_folder=output_folder, export_error=export_error, export_fpath=export_fpath, export_generator=export_generator, ).run() # Get columns that should have dtype 'string' instead of dtype 'O' str_cols_list: List[str] = [] if export_fpath: str_cols_list.extend("fpath") if export_generator: str_cols_list.extend("generator") for tm in tmaps: if tm.interpretation == Interpretation.LANGUAGE: str_cols_list.extend( [f"{tm.name}_{cm}" for cm in tm.channel_map] if tm.channel_map else [tm.name], ) str_cols_list.append(f"error_type_{tm.name}") str_cols = {key: "string" for key in str_cols_list} # Consolidate temporary CSV files into one dataframe temp_files = [] df_list = [] for name in os.listdir(output_folder): if "tensors_all_union_" in name: fpath = os.path.join(output_folder, name) _df = pd.read_csv(fpath, dtype=str_cols) logging.debug(f"Loaded {fpath} into memory") df_list.append(_df) logging.debug(f"Appended {fpath} to list of dataframes") temp_files.append(fpath) df = pd.concat(df_list, ignore_index=True) logging.info( f"{len(df)} samples extracted from {len(paths)} hd5 files using {len(tmaps)}" " tmaps, and consolidated to one DataFrame", ) # Delete temporary files for fpath in temp_files: os.remove(fpath) logging.debug(f"Deleted {len(temp_files)} temporary files") return df def _modify_tmap_to_return_mean(tmap: TensorMap) -> TensorMap: """Modifies tm so it returns it's mean unless previous tensor from file fails""" new_tm = copy.deepcopy(tmap) new_tm.shape = (1,) new_tm.interpretation = Interpretation.CONTINUOUS new_tm.channel_map = None new_tm.validators = None new_tm.augmenters = None new_tm.normalizers = None new_tm.name = f"{tmap.name}_mean" def tff(_: TensorMap, data: PatientData): _tensor = tmap.tensor_from_file(tmap, data) if tmap.time_series_limit is None: _tensor = np.array([_tensor]) for i in range(len(_tensor)): try: _tensor[i] = tmap.postprocess_tensor(_tensor[i], data, augment=False) except: _tensor[i] = np.nan tensor = _tensor.mean(axis=tuple(range(len(_tensor.shape)))[1:])[:, None] if tmap.time_series_limit is None: tensor = tensor[0] return tensor new_tm.tensor_from_file = tff return new_tm def _tmap_requires_modification_for_explore(tmap: TensorMap) -> bool: """Whether a tmap has to be modified to be used in explore""" if tmap.is_continuous: return tmap.axes > 1 or (tmap.shape != (1,) and tmap.channel_map is None) if tmap.is_categorical: return tmap.axes > 1 if tmap.is_language or tmap.is_event: return False return True def _cols_from_time_windows(time_windows): return {time_point[0] for time_window in time_windows for time_point in time_window} def 
continuous_explore_header(tm: TensorMap) -> str: return tm.name def categorical_explore_header(tm: TensorMap, channel: str) -> str: return f"{tm.name}_{channel}"
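A minimal illustration (the tensor names below are made up) of the time-window structures that explore() builds: after parsing, each window is a (start, end) tuple of [time_tensor_name, day_offset] lists, and _cols_from_time_windows collects the unique tensor names to use as reference columns.

# Standalone restatement of the set comprehension in _cols_from_time_windows:
time_windows = [
    (["admit_date", -1], ["admit_date", 0]),          # hypothetical window 1
    (["discharge_date", 0], ["discharge_date", 2]),   # hypothetical window 2
]
cols = {time_point[0] for time_window in time_windows for time_point in time_window}
print(sorted(cols))  # ['admit_date', 'discharge_date']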
__init__.py
#!/usr/bin/python3 # @todo logging # @todo extra options for url like , verify=False etc. # @todo enable https://urllib3.readthedocs.io/en/latest/user-guide.html#ssl as option? # @todo option for interval day/6 hour/etc # @todo on change detected, config for calling some API # @todo fetch title into json # https://distill.io/features # proxy per check # - flask_cors, itsdangerous,MarkupSafe import datetime import os import queue import threading import time from copy import deepcopy from threading import Event import flask_login import pytz import timeago from feedgen.feed import FeedGenerator from flask import ( Flask, abort, flash, make_response, redirect, render_template, request, send_from_directory, session, url_for, ) from flask_login import login_required from flask_wtf import CSRFProtect from changedetectionio import html_tools __version__ = '0.39.13.1' datastore = None # Local running_update_threads = [] ticker_thread = None extra_stylesheets = [] update_q = queue.Queue() notification_q = queue.Queue() app = Flask(__name__, static_url_path="", static_folder="static", template_folder="templates") # Stop browser caching of assets app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0 app.config.exit = Event() app.config['NEW_VERSION_AVAILABLE'] = False app.config['LOGIN_DISABLED'] = False #app.config["EXPLAIN_TEMPLATE_LOADING"] = True # Disables caching of the templates app.config['TEMPLATES_AUTO_RELOAD'] = True csrf = CSRFProtect() csrf.init_app(app) notification_debug_log=[] def init_app_secret(datastore_path): secret = "" path = "{}/secret.txt".format(datastore_path) try: with open(path, "r") as f: secret = f.read() except FileNotFoundError: import secrets with open(path, "w") as f: secret = secrets.token_hex(32) f.write(secret) return secret # We use the whole watch object from the store/JSON so we can see if there's some related status in terms of a thread # running or something similar. @app.template_filter('format_last_checked_time') def _jinja2_filter_datetime(watch_obj, format="%Y-%m-%d %H:%M:%S"): # Worker thread tells us which UUID it is currently processing. for t in running_update_threads: if t.current_uuid == watch_obj['uuid']: return "Checking now.." if watch_obj['last_checked'] == 0: return 'Not yet' return timeago.format(int(watch_obj['last_checked']), time.time()) # @app.context_processor # def timeago(): # def _timeago(lower_time, now): # return timeago.format(lower_time, now) # return dict(timeago=_timeago) @app.template_filter('format_timestamp_timeago') def _jinja2_filter_datetimestamp(timestamp, format="%Y-%m-%d %H:%M:%S"): return timeago.format(timestamp, time.time()) # return timeago.format(timestamp, time.time()) # return datetime.datetime.utcfromtimestamp(timestamp).strftime(format) # When nobody is logged in Flask-Login's current_user is set to an AnonymousUser object. 
class User(flask_login.UserMixin): id=None def set_password(self, password): return True def get_user(self, email="defaultuser@changedetection.io"): return self def is_authenticated(self): return True def is_active(self): return True def is_anonymous(self): return False def get_id(self): return str(self.id) # Compare given password against JSON store or Env var def check_password(self, password): import base64 import hashlib # Can be stored in env (for deployments) or in the general configs raw_salt_pass = os.getenv("SALTED_PASS", False) if not raw_salt_pass: raw_salt_pass = datastore.data['settings']['application']['password'] raw_salt_pass = base64.b64decode(raw_salt_pass) salt_from_storage = raw_salt_pass[:32] # 32 is the length of the salt # Use the exact same setup you used to generate the key, but this time put in the password to check new_key = hashlib.pbkdf2_hmac( 'sha256', password.encode('utf-8'), # Convert the password to bytes salt_from_storage, 100000 ) new_key = salt_from_storage + new_key return new_key == raw_salt_pass pass def changedetection_app(config=None, datastore_o=None): global datastore datastore = datastore_o #app.config.update(config or {}) login_manager = flask_login.LoginManager(app) login_manager.login_view = 'login' app.secret_key = init_app_secret(config['datastore_path']) # Setup cors headers to allow all domains # https://flask-cors.readthedocs.io/en/latest/ # CORS(app) @login_manager.user_loader def user_loader(email): user = User() user.get_user(email) return user @login_manager.unauthorized_handler def unauthorized_handler(): # @todo validate its a URL of this host and use that return redirect(url_for('login', next=url_for('index'))) @app.route('/logout') def logout(): flask_login.logout_user() return redirect(url_for('index')) # https://github.com/pallets/flask/blob/93dd1709d05a1cf0e886df6223377bdab3b077fb/examples/tutorial/flaskr/__init__.py#L39 # You can divide up the stuff like this @app.route('/login', methods=['GET', 'POST']) def login(): if not datastore.data['settings']['application']['password'] and not os.getenv("SALTED_PASS", False): flash("Login not required, no password enabled.", "notice") return redirect(url_for('index')) if request.method == 'GET': if flask_login.current_user.is_authenticated: flash("Already logged in") return redirect(url_for("index")) output = render_template("login.html") return output user = User() user.id = "defaultuser@changedetection.io" password = request.form.get('password') if (user.check_password(password)): flask_login.login_user(user, remember=True) # For now there's nothing else interesting here other than the index/list page # It's more reliable and safe to ignore the 'next' redirect # When we used... 
# next = request.args.get('next') # return redirect(next or url_for('index')) # We would sometimes get login loop errors on sites hosted in sub-paths # note for the future: # if not is_safe_url(next): # return flask.abort(400) return redirect(url_for('index')) else: flash('Incorrect password', 'error') return redirect(url_for('login')) @app.before_request def do_something_whenever_a_request_comes_in(): # Disable password login if there is not one set # (No password in settings or env var) app.config['LOGIN_DISABLED'] = datastore.data['settings']['application']['password'] == False and os.getenv("SALTED_PASS", False) == False # Set the auth cookie path if we're running as X-settings/X-Forwarded-Prefix if os.getenv('USE_X_SETTINGS') and 'X-Forwarded-Prefix' in request.headers: app.config['REMEMBER_COOKIE_PATH'] = request.headers['X-Forwarded-Prefix'] app.config['SESSION_COOKIE_PATH'] = request.headers['X-Forwarded-Prefix'] # For the RSS path, allow access via a token if request.path == '/rss' and request.args.get('token'): app_rss_token = datastore.data['settings']['application']['rss_access_token'] rss_url_token = request.args.get('token') if app_rss_token == rss_url_token: app.config['LOGIN_DISABLED'] = True @app.route("/rss", methods=['GET']) @login_required def rss(): from . import diff limit_tag = request.args.get('tag') # Sort by last_changed and add the uuid which is usually the key.. sorted_watches = [] # @todo needs a .itemsWithTag() or something for uuid, watch in datastore.data['watching'].items(): if limit_tag != None: # Support for comma separated list of tags. for tag_in_watch in watch['tag'].split(','): tag_in_watch = tag_in_watch.strip() if tag_in_watch == limit_tag: watch['uuid'] = uuid sorted_watches.append(watch) else: watch['uuid'] = uuid sorted_watches.append(watch) sorted_watches.sort(key=lambda x: x['last_changed'], reverse=True) fg = FeedGenerator() fg.title('changedetection.io') fg.description('Feed description') fg.link(href='https://changedetection.io') for watch in sorted_watches: dates = list(watch['history'].keys()) # Re #521 - Don't bother processing this one if theres less than 2 snapshots, means we never had a change detected. if len(dates) < 2: continue # Convert to int, sort and back to str again # @todo replace datastore getter that does this automatically dates = [int(i) for i in dates] dates.sort(reverse=True) dates = [str(i) for i in dates] prev_fname = watch['history'][dates[1]] if not watch['viewed']: # Re #239 - GUID needs to be individual for each event # @todo In the future make this a configurable link back (see work on BASE_URL https://github.com/dgtlmoon/changedetection.io/pull/228) guid = "{}/{}".format(watch['uuid'], watch['last_changed']) fe = fg.add_entry() # Include a link to the diff page, they will have to login here to see if password protection is enabled. # Description is the page you watch, link takes you to the diff JS UI page base_url = datastore.data['settings']['application']['base_url'] if base_url == '': base_url = "<base-url-env-var-not-set>" diff_link = {'href': "{}{}".format(base_url, url_for('diff_history_page', uuid=watch['uuid']))} fe.link(link=diff_link) # @todo watch should be a getter - watch.get('title') (internally if URL else..) 
watch_title = watch.get('title') if watch.get('title') else watch.get('url') fe.title(title=watch_title) latest_fname = watch['history'][dates[0]] html_diff = diff.render_diff(prev_fname, latest_fname, include_equal=False, line_feed_sep="</br>") fe.description(description="<![CDATA[<html><body><h4>{}</h4>{}</body></html>".format(watch_title, html_diff)) fe.guid(guid, permalink=False) dt = datetime.datetime.fromtimestamp(int(watch['newest_history_key'])) dt = dt.replace(tzinfo=pytz.UTC) fe.pubDate(dt) response = make_response(fg.rss_str()) response.headers.set('Content-Type', 'application/rss+xml') return response @app.route("/", methods=['GET']) @login_required def index(): from changedetectionio import forms limit_tag = request.args.get('tag') pause_uuid = request.args.get('pause') # Redirect for the old rss path which used the /?rss=true if request.args.get('rss'): return redirect(url_for('rss', tag=limit_tag)) if pause_uuid: try: datastore.data['watching'][pause_uuid]['paused'] ^= True datastore.needs_write = True return redirect(url_for('index', tag = limit_tag)) except KeyError: pass # Sort by last_changed and add the uuid which is usually the key.. sorted_watches = [] for uuid, watch in datastore.data['watching'].items(): if limit_tag != None: # Support for comma separated list of tags. for tag_in_watch in watch['tag'].split(','): tag_in_watch = tag_in_watch.strip() if tag_in_watch == limit_tag: watch['uuid'] = uuid sorted_watches.append(watch) else: watch['uuid'] = uuid sorted_watches.append(watch) sorted_watches.sort(key=lambda x: x['last_changed'], reverse=True) existing_tags = datastore.get_all_tags() form = forms.quickWatchForm(request.form) output = render_template("watch-overview.html", form=form, watches=sorted_watches, tags=existing_tags, active_tag=limit_tag, app_rss_token=datastore.data['settings']['application']['rss_access_token'], has_unviewed=datastore.data['has_unviewed'], # Don't link to hosting when we're on the hosting environment hosted_sticky=os.getenv("SALTED_PASS", False) == False, guid=datastore.data['app_guid'], queued_uuids=update_q.queue) if session.get('share-link'): del(session['share-link']) return output # AJAX endpoint for sending a test @app.route("/notification/send-test", methods=['POST']) @login_required def ajax_callback_send_notification_test(): import apprise apobj = apprise.Apprise() # validate URLS if not len(request.form['notification_urls'].strip()): return make_response({'error': 'No Notification URLs set'}, 400) for server_url in request.form['notification_urls'].splitlines(): if len(server_url.strip()): if not apobj.add(server_url): message = '{} is not a valid AppRise URL.'.format(server_url) return make_response({'error': message}, 400) try: n_object = {'watch_url': request.form['window_url'], 'notification_urls': request.form['notification_urls'].splitlines(), 'notification_title': request.form['notification_title'].strip(), 'notification_body': request.form['notification_body'].strip(), 'notification_format': request.form['notification_format'].strip() } notification_q.put(n_object) except Exception as e: return make_response({'error': str(e)}, 400) return 'OK' @app.route("/scrub", methods=['GET', 'POST']) @login_required def scrub_page(): if request.method == 'POST': confirmtext = request.form.get('confirmtext') if confirmtext == 'scrub': changes_removed = 0 for uuid in datastore.data['watching'].keys(): datastore.scrub_watch(uuid) flash("Cleared all snapshot history") else: flash('Incorrect confirmation text.', 'error') return 
redirect(url_for('index')) output = render_template("scrub.html") return output # If they edited an existing watch, we need to know to reset the current/previous md5 to include # the excluded text. def get_current_checksum_include_ignore_text(uuid): import hashlib from changedetectionio import fetch_site_status # Get the most recent one newest_history_key = datastore.get_val(uuid, 'newest_history_key') # 0 means that theres only one, so that there should be no 'unviewed' history available if newest_history_key == 0: newest_history_key = list(datastore.data['watching'][uuid]['history'].keys())[0] if newest_history_key: with open(datastore.data['watching'][uuid]['history'][newest_history_key], encoding='utf-8') as file: raw_content = file.read() handler = fetch_site_status.perform_site_check(datastore=datastore) stripped_content = html_tools.strip_ignore_text(raw_content, datastore.data['watching'][uuid]['ignore_text']) if datastore.data['settings']['application'].get('ignore_whitespace', False): checksum = hashlib.md5(stripped_content.translate(None, b'\r\n\t ')).hexdigest() else: checksum = hashlib.md5(stripped_content).hexdigest() return checksum return datastore.data['watching'][uuid]['previous_md5'] @app.route("/edit/<string:uuid>", methods=['GET', 'POST']) @login_required # https://stackoverflow.com/questions/42984453/wtforms-populate-form-with-data-if-data-exists # https://wtforms.readthedocs.io/en/3.0.x/forms/#wtforms.form.Form.populate_obj ? def edit_page(uuid): from changedetectionio import forms using_default_check_time = True # More for testing, possible to return the first/only if not datastore.data['watching'].keys(): flash("No watches to edit", "error") return redirect(url_for('index')) if uuid == 'first': uuid = list(datastore.data['watching'].keys()).pop() if not uuid in datastore.data['watching']: flash("No watch with the UUID %s found." % (uuid), "error") return redirect(url_for('index')) # be sure we update with a copy instead of accidently editing the live object by reference default = deepcopy(datastore.data['watching'][uuid]) # Show system wide default if nothing configured if datastore.data['watching'][uuid]['fetch_backend'] is None: default['fetch_backend'] = datastore.data['settings']['application']['fetch_backend'] # Show system wide default if nothing configured if all(value == 0 or value == None for value in datastore.data['watching'][uuid]['time_between_check'].values()): default['time_between_check'] = deepcopy(datastore.data['settings']['requests']['time_between_check']) # Defaults for proxy choice if datastore.proxy_list is not None: # When enabled system_proxy = datastore.data['settings']['requests']['proxy'] if default['proxy'] is None: default['proxy'] = system_proxy else: # Does the chosen one exist? 
if not any(default['proxy'] in tup for tup in datastore.proxy_list): default['proxy'] = datastore.proxy_list[0][0] # Used by the form handler to keep or remove the proxy settings default['proxy_list'] = datastore.proxy_list # proxy_override set to the json/text list of the items form = forms.watchForm(formdata=request.form if request.method == 'POST' else None, data=default, ) if datastore.proxy_list is None: # @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead del form.proxy else: form.proxy.choices = datastore.proxy_list if default['proxy'] is None: form.proxy.default='http://hello' if request.method == 'POST' and form.validate(): extra_update_obj = {} # Re #110, if they submit the same as the default value, set it to None, so we continue to follow the default # Assume we use the default value, unless something relevant is different, then use the form value # values could be None, 0 etc. # Set to None unless the next for: says that something is different extra_update_obj['time_between_check'] = dict.fromkeys(form.time_between_check.data) for k, v in form.time_between_check.data.items(): if v and v != datastore.data['settings']['requests']['time_between_check'][k]: extra_update_obj['time_between_check'] = form.time_between_check.data using_default_check_time = False break # Use the default if its the same as system wide if form.fetch_backend.data == datastore.data['settings']['application']['fetch_backend']: extra_update_obj['fetch_backend'] = None # Notification URLs datastore.data['watching'][uuid]['notification_urls'] = form.notification_urls.data # Ignore text form_ignore_text = form.ignore_text.data datastore.data['watching'][uuid]['ignore_text'] = form_ignore_text # Reset the previous_md5 so we process a new snapshot including stripping ignore text. if form_ignore_text: if len(datastore.data['watching'][uuid]['history']): extra_update_obj['previous_md5'] = get_current_checksum_include_ignore_text(uuid=uuid) # Reset the previous_md5 so we process a new snapshot including stripping ignore text. 
if form.css_filter.data.strip() != datastore.data['watching'][uuid]['css_filter']: if len(datastore.data['watching'][uuid]['history']): extra_update_obj['previous_md5'] = get_current_checksum_include_ignore_text(uuid=uuid) datastore.data['watching'][uuid].update(form.data) datastore.data['watching'][uuid].update(extra_update_obj) flash("Updated watch.") # Re #286 - We wait for syncing new data to disk in another thread every 60 seconds # But in the case something is added we should save straight away datastore.needs_write_urgent = True # Queue the watch for immediate recheck update_q.put(uuid) # Diff page [edit] link should go back to diff page if request.args.get("next") and request.args.get("next") == 'diff' and not form.save_and_preview_button.data: return redirect(url_for('diff_history_page', uuid=uuid)) else: if form.save_and_preview_button.data: flash('You may need to reload this page to see the new content.') return redirect(url_for('preview_page', uuid=uuid)) else: return redirect(url_for('index')) else: if request.method == 'POST' and not form.validate(): flash("An error occurred, please see below.", "error") output = render_template("edit.html", uuid=uuid, watch=datastore.data['watching'][uuid], form=form, has_empty_checktime=using_default_check_time, current_base_url=datastore.data['settings']['application']['base_url'], emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False) ) return output @app.route("/settings", methods=['GET', "POST"]) @login_required def settings_page(): from changedetectionio import content_fetcher, forms default = deepcopy(datastore.data['settings']) if datastore.proxy_list is not None: # When enabled system_proxy = datastore.data['settings']['requests']['proxy'] # In the case it doesnt exist anymore if not any([system_proxy in tup for tup in datastore.proxy_list]): system_proxy = None default['requests']['proxy'] = system_proxy if system_proxy is not None else datastore.proxy_list[0][0] # Used by the form handler to keep or remove the proxy settings default['proxy_list'] = datastore.proxy_list # Don't use form.data on POST so that it doesnt overrid the checkbox status from the POST status form = forms.globalSettingsForm(formdata=request.form if request.method == 'POST' else None, data=default ) if datastore.proxy_list is None: # @todo - Couldn't get setattr() etc dynamic addition working, so remove it instead del form.requests.form.proxy else: form.requests.form.proxy.choices = datastore.proxy_list if request.method == 'POST': # Password unset is a GET, but we can lock the session to a salted env password to always need the password if form.application.form.data.get('removepassword_button', False): # SALTED_PASS means the password is "locked" to what we set in the Env var if not os.getenv("SALTED_PASS", False): datastore.remove_password() flash("Password protection removed.", 'notice') flask_login.logout_user() return redirect(url_for('settings_page')) if form.validate(): datastore.data['settings']['application'].update(form.data['application']) datastore.data['settings']['requests'].update(form.data['requests']) if not os.getenv("SALTED_PASS", False) and len(form.application.form.password.encrypted_password): datastore.data['settings']['application']['password'] = form.application.form.password.encrypted_password datastore.needs_write_urgent = True flash("Password protection enabled.", 'notice') flask_login.logout_user() return redirect(url_for('index')) datastore.needs_write_urgent = True flash("Settings updated.") else: flash("An error 
occurred, please see below.", "error") output = render_template("settings.html", form=form, current_base_url = datastore.data['settings']['application']['base_url'], hide_remove_pass=os.getenv("SALTED_PASS", False), emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False)) return output @app.route("/import", methods=['GET', "POST"]) @login_required def import_page(): remaining_urls = [] if request.method == 'POST': from .importer import import_url_list, import_distill_io_json # URL List import if request.values.get('urls') and len(request.values.get('urls').strip()): # Import and push into the queue for immediate update check importer = import_url_list() importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore) for uuid in importer.new_uuids: update_q.put(uuid) if len(importer.remaining_data) == 0: return redirect(url_for('index')) else: remaining_urls = importer.remaining_data # Distill.io import if request.values.get('distill-io') and len(request.values.get('distill-io').strip()): # Import and push into the queue for immediate update check d_importer = import_distill_io_json() d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore) for uuid in d_importer.new_uuids: update_q.put(uuid) # Could be some remaining, or we could be on GET output = render_template("import.html", import_url_list_remaining="\n".join(remaining_urls), original_distill_json='' ) return output # Clear all statuses, so we do not see the 'unviewed' class @app.route("/api/mark-all-viewed", methods=['GET']) @login_required def mark_all_viewed(): # Save the current newest history as the most recently viewed for watch_uuid, watch in datastore.data['watching'].items(): datastore.set_last_viewed(watch_uuid, watch['newest_history_key']) flash("Cleared all statuses.") return redirect(url_for('index')) @app.route("/diff/<string:uuid>", methods=['GET']) @login_required def diff_history_page(uuid): # More for testing, possible to return the first/only if uuid == 'first': uuid = list(datastore.data['watching'].keys()).pop() extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')] try: watch = datastore.data['watching'][uuid] except KeyError: flash("No history found for the specified link, bad link?", "error") return redirect(url_for('index')) dates = list(watch['history'].keys()) # Convert to int, sort and back to str again # @todo replace datastore getter that does this automatically dates = [int(i) for i in dates] dates.sort(reverse=True) dates = [str(i) for i in dates] if len(dates) < 2: flash("Not enough saved change detection snapshots to produce a report.", "error") return redirect(url_for('index')) # Save the current newest history as the most recently viewed datastore.set_last_viewed(uuid, dates[0]) newest_file = watch['history'][dates[0]] try: with open(newest_file, 'r') as f: newest_version_file_contents = f.read() except Exception as e: newest_version_file_contents = "Unable to read {}.\n".format(newest_file) previous_version = request.args.get('previous_version') try: previous_file = watch['history'][previous_version] except KeyError: # Not present, use a default value, the second one in the sorted list. 
previous_file = watch['history'][dates[1]] try: with open(previous_file, 'r') as f: previous_version_file_contents = f.read() except Exception as e: previous_version_file_contents = "Unable to read {}.\n".format(previous_file) screenshot_url = datastore.get_screenshot(uuid) output = render_template("diff.html", watch_a=watch, newest=newest_version_file_contents, previous=previous_version_file_contents, extra_stylesheets=extra_stylesheets, versions=dates[1:], uuid=uuid, newest_version_timestamp=dates[0], current_previous_version=str(previous_version), current_diff_url=watch['url'], extra_title=" - Diff - {}".format(watch['title'] if watch['title'] else watch['url']), left_sticky=True, screenshot=screenshot_url) return output @app.route("/preview/<string:uuid>", methods=['GET']) @login_required def preview_page(uuid): content = [] ignored_line_numbers = [] trigger_line_numbers = [] # More for testing, possible to return the first/only if uuid == 'first': uuid = list(datastore.data['watching'].keys()).pop() extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')] try: watch = datastore.data['watching'][uuid] except KeyError: flash("No history found for the specified link, bad link?", "error") return redirect(url_for('index')) if len(watch['history']): timestamps = sorted(watch['history'].keys(), key=lambda x: int(x)) filename = watch['history'][timestamps[-1]] try: with open(filename, 'r') as f: tmp = f.readlines() # Get what needs to be highlighted ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text'] # .readlines will keep the \n, but we will parse it here again, in the future tidy this up ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp), wordlist=ignore_rules, mode='line numbers' ) trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp), wordlist=watch['trigger_text'], mode='line numbers' ) # Prepare the classes and lines used in the template i=0 for l in tmp: classes=[] i+=1 if i in ignored_line_numbers: classes.append('ignored') if i in trigger_line_numbers: classes.append('triggered') content.append({'line': l, 'classes': ' '.join(classes)}) except Exception as e: content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''}) else: content.append({'line': "No history found", 'classes': ''}) screenshot_url = datastore.get_screenshot(uuid) output = render_template("preview.html", content=content, extra_stylesheets=extra_stylesheets, ignored_line_numbers=ignored_line_numbers, triggered_line_numbers=trigger_line_numbers, current_diff_url=watch['url'], screenshot=screenshot_url, watch=watch, uuid=uuid) return output @app.route("/settings/notification-logs", methods=['GET']) @login_required def notification_logs(): global notification_debug_log output = render_template("notification-log.html", logs=notification_debug_log if len(notification_debug_log) else ["No errors or warnings detected"]) return output @app.route("/api/<string:uuid>/snapshot/current", methods=['GET']) @login_required def api_snapshot(uuid): # More for testing, possible to return the first/only if uuid == 'first': uuid = list(datastore.data['watching'].keys()).pop() try: watch = datastore.data['watching'][uuid] except KeyError: return abort(400, "No history found for the specified link, bad link?") newest = list(watch['history'].keys())[-1] with open(watch['history'][newest], 'r') as f: content = f.read() resp = make_response(content) 
resp.headers['Content-Type'] = 'text/plain' return resp @app.route("/favicon.ico", methods=['GET']) def favicon(): return send_from_directory("static/images", path="favicon.ico") # We're good but backups are even better! @app.route("/backup", methods=['GET']) @login_required def get_backup(): import zipfile from pathlib import Path # Remove any existing backup file, for now we just keep one file for previous_backup_filename in Path(datastore_o.datastore_path).rglob('changedetection-backup-*.zip'): os.unlink(previous_backup_filename) # create a ZipFile object backupname = "changedetection-backup-{}.zip".format(int(time.time())) # We only care about UUIDS from the current index file uuids = list(datastore.data['watching'].keys()) backup_filepath = os.path.join(datastore_o.datastore_path, backupname) with zipfile.ZipFile(backup_filepath, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=8) as zipObj: # Be sure we're written fresh datastore.sync_to_json() # Add the index zipObj.write(os.path.join(datastore_o.datastore_path, "url-watches.json"), arcname="url-watches.json") # Add the flask app secret zipObj.write(os.path.join(datastore_o.datastore_path, "secret.txt"), arcname="secret.txt") # Add any snapshot data we find, use the full path to access the file, but make the file 'relative' in the Zip. for txt_file_path in Path(datastore_o.datastore_path).rglob('*.txt'): parent_p = txt_file_path.parent if parent_p.name in uuids: zipObj.write(txt_file_path, arcname=str(txt_file_path).replace(datastore_o.datastore_path, ''), compress_type=zipfile.ZIP_DEFLATED, compresslevel=8) # Create a list file with just the URLs, so it's easier to port somewhere else in the future list_file = "url-list.txt" with open(os.path.join(datastore_o.datastore_path, list_file), "w") as f: for uuid in datastore.data["watching"]: url = datastore.data["watching"][uuid]["url"] f.write("{}\r\n".format(url)) list_with_tags_file = "url-list-with-tags.txt" with open( os.path.join(datastore_o.datastore_path, list_with_tags_file), "w" ) as f: for uuid in datastore.data["watching"]: url = datastore.data["watching"][uuid]["url"] tag = datastore.data["watching"][uuid]["tag"] f.write("{} {}\r\n".format(url, tag)) # Add it to the Zip zipObj.write( os.path.join(datastore_o.datastore_path, list_file), arcname=list_file, compress_type=zipfile.ZIP_DEFLATED, compresslevel=8, ) zipObj.write( os.path.join(datastore_o.datastore_path, list_with_tags_file), arcname=list_with_tags_file, compress_type=zipfile.ZIP_DEFLATED, compresslevel=8, ) # Send_from_directory needs to be the full absolute path return send_from_directory(os.path.abspath(datastore_o.datastore_path), backupname, as_attachment=True) @app.route("/static/<string:group>/<string:filename>", methods=['GET']) def static_content(group, filename): if group == 'screenshot': from flask import make_response # Could be sensitive, follow password requirements if datastore.data['settings']['application']['password'] and not flask_login.current_user.is_authenticated: abort(403) # These files should be in our subdirectory try: # set nocache, set content-type watch_dir = datastore_o.datastore_path + "/" + filename response = make_response(send_from_directory(filename="last-screenshot.png", directory=watch_dir, path=watch_dir + "/last-screenshot.png")) response.headers['Content-type'] = 'image/png' response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate' response.headers['Pragma'] = 'no-cache' response.headers['Expires'] = 0 return response except FileNotFoundError: abort(404) # 
These files should be in our subdirectory try: return send_from_directory("static/{}".format(group), path=filename) except FileNotFoundError: abort(404) @app.route("/api/add", methods=['POST']) @login_required def api_watch_add(): from changedetectionio import forms form = forms.quickWatchForm(request.form) if not form.validate(): flash("Error") return redirect(url_for('index')) url = request.form.get('url').strip() if datastore.url_exists(url): flash('The URL {} already exists'.format(url), "error") return redirect(url_for('index')) # @todo add_watch should throw a custom Exception for validation etc new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip()) if new_uuid: # Straight into the queue. update_q.put(new_uuid) flash("Watch added.") return redirect(url_for('index')) @app.route("/api/delete", methods=['GET']) @login_required def api_delete(): uuid = request.args.get('uuid') if uuid != 'all' and not uuid in datastore.data['watching'].keys(): flash('The watch by UUID {} does not exist.'.format(uuid), 'error') return redirect(url_for('index')) # More for testing, possible to return the first/only if uuid == 'first': uuid = list(datastore.data['watching'].keys()).pop() datastore.delete(uuid) flash('Deleted.') return redirect(url_for('index')) @app.route("/api/clone", methods=['GET']) @login_required def api_clone(): uuid = request.args.get('uuid') # More for testing, possible to return the first/only if uuid == 'first': uuid = list(datastore.data['watching'].keys()).pop() new_uuid = datastore.clone(uuid) update_q.put(new_uuid) flash('Cloned.') return redirect(url_for('index')) @app.route("/api/checknow", methods=['GET']) @login_required def api_watch_checknow(): tag = request.args.get('tag') uuid = request.args.get('uuid') i = 0 running_uuids = [] for t in running_update_threads: running_uuids.append(t.current_uuid) # @todo check thread is running and skip if uuid: if uuid not in running_uuids: update_q.put(uuid) i = 1 elif tag != None: # Items that have this current tag for watch_uuid, watch in datastore.data['watching'].items(): if (tag != None and tag in watch['tag']): if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']: update_q.put(watch_uuid) i += 1 else: # No tag, no uuid, add everything. 
for watch_uuid, watch in datastore.data['watching'].items(): if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']: update_q.put(watch_uuid) i += 1 flash("{} watches are queued for rechecking.".format(i)) return redirect(url_for('index', tag=tag)) @app.route("/api/share-url", methods=['GET']) @login_required def api_share_put_watch(): """Given a watch UUID, upload the info and return a share-link the share-link can be imported/added""" import requests import json tag = request.args.get('tag') uuid = request.args.get('uuid') # more for testing if uuid == 'first': uuid = list(datastore.data['watching'].keys()).pop() # copy it to memory as trim off what we dont need (history) watch = deepcopy(datastore.data['watching'][uuid]) if (watch.get('history')): del (watch['history']) # for safety/privacy for k in list(watch.keys()): if k.startswith('notification_'): del watch[k] for r in['uuid', 'last_checked', 'last_changed']: if watch.get(r): del (watch[r]) # Add the global stuff which may have an impact watch['ignore_text'] += datastore.data['settings']['application']['global_ignore_text'] watch['subtractive_selectors'] += datastore.data['settings']['application']['global_subtractive_selectors'] watch_json = json.dumps(watch) try: r = requests.request(method="POST", data={'watch': watch_json}, url="https://changedetection.io/share/share", headers={'App-Guid': datastore.data['app_guid']}) res = r.json() session['share-link'] = "https://changedetection.io/share/{}".format(res['share_key']) except Exception as e: flash("Could not share, something went wrong while communicating with the share server.", 'error') # https://changedetection.io/share/VrMv05wpXyQa # in the browser - should give you a nice info page - wtf # paste in etc return redirect(url_for('index')) # @todo handle ctrl break ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start() threading.Thread(target=notification_runner).start() # Check for new release version, but not when running in test/build if not os.getenv("GITHUB_REF", False): threading.Thread(target=check_for_new_version).start() return app # Check for new version and anonymous stats def check_for_new_version(): import requests import urllib3 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) while not app.config.exit.is_set(): try: r = requests.post("https://changedetection.io/check-ver.php", data={'version': __version__, 'app_guid': datastore.data['app_guid'], 'watch_count': len(datastore.data['watching']) }, verify=False) except: pass try: if "new_version" in r.text: app.config['NEW_VERSION_AVAILABLE'] = True except: pass # Check daily app.config.exit.wait(86400) def notification_runner(): global notification_debug_log while not app.config.exit.is_set(): try: # At the moment only one thread runs (single runner) n_object = notification_q.get(block=False) except queue.Empty: time.sleep(1) else: # Process notifications try: from changedetectionio import notification notification.process_notification(n_object, datastore) except Exception as e: print("Watch URL: {} Error {}".format(n_object['watch_url'], str(e))) # UUID wont be present when we submit a 'test' from the global settings if 'uuid' in n_object: datastore.update_watch(uuid=n_object['uuid'], update_obj={'last_notification_error': "Notification error detected, please see logs."}) log_lines = str(e).splitlines() notification_debug_log += log_lines # Trim the log length notification_debug_log = notification_debug_log[-100:] # Thread 
# runner to check every minute, look for new watches to feed into the Queue.
def ticker_thread_check_time_launch_checks():
    from changedetectionio import update_worker

    # Spin up Workers that do the fetching
    # Can be overridden by ENV or use the default settings
    n_workers = int(os.getenv("FETCH_WORKERS", datastore.data['settings']['requests']['workers']))
    for _ in range(n_workers):
        new_worker = update_worker.update_worker(update_q, notification_q, app, datastore)
        running_update_threads.append(new_worker)
        new_worker.start()

    while not app.config.exit.is_set():

        # Get a list of watches by UUID that are currently fetching data
        running_uuids = []
        for t in running_update_threads:
            if t.current_uuid:
                running_uuids.append(t.current_uuid)

        # Re #232 - Deepcopy the data in case it changes while we're iterating through it all
        while True:
            try:
                copied_datastore = deepcopy(datastore)
            except RuntimeError as e:
                # RuntimeError: dictionary changed size during iteration
                time.sleep(0.1)
            else:
                break

        # Re #438 - Don't place more watches in the queue to be checked if the queue is already large
        while update_q.qsize() >= 2000:
            time.sleep(1)

        # Check for watches outside of the time threshold to put in the thread queue.
        now = time.time()

        recheck_time_minimum_seconds = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
        recheck_time_system_seconds = datastore.threshold_seconds

        for uuid, watch in copied_datastore.data['watching'].items():

            # No need to do further processing if it's paused
            if watch['paused']:
                continue

            # If they supplied an individual recheck interval, use it for the threshold.
            threshold = now
            watch_threshold_seconds = watch.threshold_seconds()
            if watch_threshold_seconds:
                threshold -= watch_threshold_seconds
            else:
                threshold -= recheck_time_system_seconds

            # Put it in the queue once it's due; comparing against now - recheck_time_minimum_seconds
            # as well stops any watch from being rechecked faster than the configured minimum.
            # (Comparing a Unix timestamp against the bare 60-second value would make the floor a no-op.)
            if watch['last_checked'] <= min(threshold, now - recheck_time_minimum_seconds):
                if uuid not in running_uuids and uuid not in update_q.queue:
                    update_q.put(uuid)

        # Wait a few seconds before checking the list again
        time.sleep(3)

        # Should be low so we can break this out in testing
        app.config.exit.wait(1)
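

# Illustrative sketch of the storage-side counterpart of User.check_password() above, which
# base64-decodes the stored value, takes its first 32 bytes as the salt, recomputes
# PBKDF2-HMAC-SHA256 (100000 iterations) and compares salt + derived key against the whole
# decoded blob. A value for the settings store (or the SALTED_PASS env var) could therefore
# be produced as below; the helper name `generate_salted_password` is hypothetical, not an
# upstream API.
def generate_salted_password(password):
    import base64
    import hashlib
    import secrets

    salt = secrets.token_bytes(32)  # 32-byte salt, matching raw_salt_pass[:32] in check_password()
    key = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, 100000)
    # check_password() accepts exactly this layout: base64(salt || derived_key)
    return base64.b64encode(salt + key).decode('ascii')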
OblivionClient.py
# -*- coding: utf-8 -*-
# --noconsole

import sys
import threading
import ctypes
import time
import random
import pathlib
import base64
import glob
import logging

# os and datetime are referenced directly below (e.g. os.system(), datetime.datetime.now()),
# so import them explicitly instead of relying on the wildcard imports further down.
import os
import datetime

import coloredlogs
import verboselogs

from PySide2 import QtCore, QtWidgets, QtGui
from PySide2.QtWidgets import QWidget
from PySide2.QtGui import (QBrush, QColor, QConicalGradient, QCursor, QFont, QFontDatabase,
                           QIcon, QKeySequence, QLinearGradient, QPalette, QPainter, QPixmap,
                           QRadialGradient)
from PySide2.QtCore import (QMetaObject, QCoreApplication, QDate, QDateTime, QObject, QPoint,
                            QRect, QSize, QTime, QUrl, Qt)
from PySide2.QtWidgets import *
from interface import Ui_MainWindow
from os import environ
from sqlalchemy import create_engine
from etc.email.send_email import *
from etc.api.scylla import *
from etc.api.intelxapi import *
from etc.api.intelligencex import *
from etc.api.hibpwned import *
from etc.scrap.wordlists_search import *
from etc.scrap.pastebin import *
from etc.scrap.google_dorks import *
from etc.api.cves import *
from etc.create import *
from etc.notification.telegram import *
from etc.decript import decript_file
from etc.api.keys import *

# Global variables/Variáveis globais.
db_file = 'data.db'
path_central = str(pathlib.Path(__file__).parent.absolute())


def suppress_qt_warnings():
    """
    Suppresses some warnings of graphics/Suprimir alguns avisos da parte gráfica.
    """
    environ["QT_DEVICE_PIXEL_RATIO"] = "0"
    environ["QT_AUTO_SCREEN_SCALE_FACTOR"] = "1"
    environ["QT_SCREEN_SCALE_FACTORS"] = "1"
    environ["QT_SCALE_FACTOR"] = "1"


def logando_main(tipo, mensagem):
    """
    :param tipo: Sets the log type/Seta o tipo de log.
    :param mensagem: Sets the message of log/Seta a mensagem do log.
    :return: Returns the complete log's body/Retorna o corpo completo do log.
    """
    logger = logging.getLogger(__name__)
    coloredlogs.install(level='DEBUG')
    coloredlogs.install(level='DEBUG', logger=logger)
    logging.basicConfig(format='%(asctime)s %(hostname)s %(name)s[%(process)d] %(levelname)s %(message)s')
    logger = verboselogs.VerboseLogger('')
    if tipo == 'verbose':
        logger.verbose(mensagem)
    elif tipo == 'debug':
        logger.debug(mensagem)
    elif tipo == 'info':
        logger.info(mensagem)
    elif tipo == 'warning':
        logger.warning(mensagem)
    elif tipo == 'error':
        logger.error(mensagem)
    elif tipo == 'critical':
        logger.critical(mensagem)
    else:
        pass


class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
    """
    Gets the Oblivion's GUI/Pega a parte gráfica do Oblivion.
""" def __init__(self): super(MainWindow, self).__init__() print("Oblivion [version 1.0.0]", "Copyright (c) 2020, Loseys - BSD License.\n", sep='\n'); logando_main('info','Starting the Oblivion') self.setupUi(self) self.analise_nova_analise.clicked.connect(self.segundo_plano) self.toolButton_4.clicked.connect(self.window2) self.setWindowIcon(QtGui.QIcon('icone.png')) myappid = 'ImproveSecurity.Oblivion.Scan.Beta' ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid) self.config_modulos.expandToDepth(1) self.config_formatos.expandToDepth(1) self.menu_configuracoes.clicked.connect(self.configuracao_oblivion) self.analise_salvar.clicked.connect(self.loop_set) self.cancelar_analise = False self.arquivo_decript = None self.analise_salvar.setDisabled(True) self.push_decodificar.clicked.connect(self.decript_f) self.toolButton_selecionar_arq.clicked.connect(self.abrir_f) self.pushButton_2.clicked.connect(self.agendar_tarefa) now = datetime.datetime.now() self.dateEdit.setDate(QDate(now.year, now.month, now.day)) self.timeEdit.setTime(QTime(now.hour, now.minute)) self.menu_analises.clicked.connect(self.historico_listar) #self.modulos_modulos_naturallanguage.setDisabled(1) self.setWindowIcon(QtGui.QIcon(f'{path_central}/media/oblivion-256.png')) def historico_listar(self): """ Lists all the scans listed on Oblivion's historic/Lista todos os scans do histórico. """ try: self.listWidget_2.clear() with open(f'{path_central}/etc/logs/activity.txt', 'r') as historico: analises = historico.read() analises = analises.split('\n') for e in analises: if e != '' and e != ' ': self.listWidget_2.addItem(e) except: logando_main('error', 'Was not possible to load the historic') def agendar_tarefa(self): """ Creates the scheduled scan/Cria análise agendada. 
""" hash_f = random.getrandbits(28) data_agenda = self.dateEdit.text() data_agenda = data_agenda.replace('/','-') hora_agenda = self.timeEdit.text() hora_agenda = hora_agenda.replace(':','_') path_oblivion = os.path.dirname(__file__) path_scripts = str(path_oblivion) + '/' path_schedule = path_scripts + f'etc/schedule/scripts/{self.plainTextEdit.toPlainText()}_{data_agenda}-{hora_agenda}.txt' path_start = path_oblivion + '/schedule.py' print(path_start) with open(f'etc/schedule/scripts/{self.plainTextEdit.toPlainText()}_{data_agenda}-{hora_agenda}.txt', 'w+') as criar_config: criar_config.write( f"""agenda_modulos_vulscan={self.agenda_modulos_vulscan.checkState(0)} agenda_modulos_commonwebsites={self.agenda_modulos_commonwebsites.checkState(0)} agenda_modulos_googledorks={self.agenda_modulos_googledorks.checkState(0)} agenda_modulos_wordlists={self.agenda_modulos_wordlists.checkState(0)} agenda_modulos_leak_api={self.agenda_modulos_leak_api.checkState(0)} agenda_gerar_copia_dadobruto={self.agenda_gerar_copia_dadobruto.checkState(0)} agenda_gerar_copia_txt={self.agenda_gerar_copia_txt.checkState(0)} agenda_gerar_copia_txt_ocult={self.agenda_gerar_copia_txt_ocult.checkState(0)} agenda_gerar_copia_txt_cript={self.agenda_gerar_copia_txt_cript.checkState(0)} agenda_gerar_copia_docx={self.agenda_gerar_copia_docx.checkState(0)} agenda_gerar_copia_docx_ocult={self.agenda_gerar_copia_docx_ocult.checkState(0)} agenda_gerar_copia_docx_cript={self.agenda_gerar_copia_docx_cript.checkState(0)} agenda_gerar_copia_pdf={self.agenda_gerar_copia_pdf.checkState(0)} agenda_gerar_copia_pdf_ocult={self.agenda_gerar_copia_pdf_ocult.checkState(0)} agenda_gerar_copia_pdf_cript={self.agenda_gerar_copia_pdf_cript.checkState(0)} agenda_gerar_copia_xlsx={self.agenda_gerar_copia_xlsx.checkState(0)} agenda_gerar_copia_xlsx_ocult={self.agenda_gerar_copia_xlsx_ocult.checkState(0)} agenda_gerar_copia_xlsx_cript={self.agenda_gerar_copia_xlsx_cript.checkState(0)} agenda_gerar_copia_json={self.agenda_gerar_copia_json.checkState(0)} agenda_gerar_copia_json_ocult={self.agenda_gerar_copia_json_ocult.checkState(0)} agenda_gerar_copia_json_cript={self.agenda_gerar_copia_json_cript.checkState(0)} agenda_gerar_copia_html={self.agenda_gerar_copia_html.checkState(0)} agenda_gerar_copia_html_ocult={self.agenda_gerar_copia_html_ocult.checkState(0)} agenda_gerar_copia_html_cript={self.agenda_gerar_copia_html_cript.checkState(0)} agenda_gerar_copia_xsl={self.agenda_gerar_copia_xsl.checkState(0)} agenda_gerar_copia_xsl_ocult={self.agenda_gerar_copia_xsl_ocult.checkState(0)} agenda_gerar_copia_xsl_cript={self.agenda_gerar_copia_xsl_cript.checkState(0)} agenda_gerar_copia_db={self.agenda_gerar_copia_db.checkState(0)} agenda_gerar_copia_db_ocult={self.agenda_gerar_copia_db_ocult.checkState(0)} agenda_gerar_copia_db_cript={self.agenda_gerar_copia_db_cript.checkState(0)} agenda_gerar_dadobruto={self.agenda_gerar_dadobruto.checkState(0)}""") try: executar = (f'SCHTASKS /create /tn "Oblivion {hash_f} - {self.plainTextEdit.toPlainText()}" /tr "{path_start} {path_schedule}" /sc ONCE /sd {self.dateEdit.text()} /st {self.timeEdit.text()}') os.system(executar) except: logando_main('error', 'Was not possible to create the schedule') def abrir_f(self): """ Selects the file path to decrypt/Seleciona o path do arquivo para descriptografar. 
""" self.arquivo_decript = str(QtWidgets.QFileDialog.getOpenFileName()[0]) self.lineEdit_local_arquivo.setText(self.arquivo_decript) def decript_f(self): """ Fixs bar spaces in the path file of decrypt function/Arruma alguns espaços do path do arquivo que será descriptografado. """ resolver_key = self.lineEdit_chave.text().replace(' ','') if resolver_key: decript_file(arquivo=self.lineEdit_local_arquivo.text(), chave=resolver_key) else: try: decript_file(arquivo=self.lineEdit_local_arquivo.text()) except: pass def loop_set(self): """ Function of the stop button, if pressed will stop the scan/Função do botão de cancelar, caso seja pressionado a análise será cancelada. Note: this function have a delay to complete/Essa função apresenta um delay para ser concluida. """ self.cancelar_analise = True def window2(self): """ Shows the window of input the passwords, e-mails and documents/Mostra a janela onde coloca-se as senhas, e-mails e os documentos. """ self.w = Window2() self.w.show() def configuracao_oblivion(self): """ Shows the configuration window/Mostra a tela de configuração. """ self.cfo = WindowConfigOblivion() self.cfo.show() def segundo_plano(self, interval=1): """ Starts the thread process/Inicia o processo de thread. """ self.interval = interval self.thread = threading.Thread(target=self.rundaemon, args=()) self.thread.daemon = True self.thread.start() def rundaemon(self): """ Calls the scan function/Chama a função de scan. """ self.nova_analise() def nova_analise(self): """ Scan function/Função da análise. """ with open(f'{path_central}/etc/logs/activity.txt', 'a') as historico: historico.write(f'\n{datetime.datetime.now()}') self.analise_nova_analise.setDisabled(True) self.analise_salvar.setDisabled(False) situacaoIntelx = None situacaoDehashed = None situacaoHaveIPwned = None situacaoScylla = None situacaoEmailrep = None situacao_loop = self.db_looping.isChecked() while True: timer_analise_f = (self.db_delay.text()) timer_analise_f = str(timer_analise_f) timer_analise_f = timer_analise_f.replace(',','.') timer_analise = float(timer_analise_f) time.sleep(timer_analise) exportar_dados = [] exportar_dados_wl =[] exportar_dados_pbdb = [] exportar_dados_dorks = [] exportar_dados_nltk = [] exportar_dados_cves = [] with open('etc/config', 'r') as file: abrir_arquivo_config = file.read() abrir_arquivo_config = abrir_arquivo_config.split('\n') for i in abrir_arquivo_config: if 'intelx' in i: i = i.replace('intelx:', '') if i == 'yes': situacaoIntelx = True else: situacaoIntelx = False if 'dehashed' in i: i = i.replace('dehashed:', '') if i == 'yes': situacaoDehashed = True else: situacaoDehashed = False if 'haveipwned' in i: i = i.replace('haveipwned:', '') if i == 'yes': situacaoHaveIPwned = True else: situacaoHaveIPwned = False if 'scylla' in i: i = i.replace('scylla:', '') if i == 'yes': situacaoScylla = True else: situacaoScylla = False if 'emailrep' in i: i = i.replace('emailrep:', '') if i == 'yes': situacaoEmailrep = True else: situacaoEmailrep = False if self.modulos_modulos_dataleak_common.checkState(0): logando_main('info', 'Verifying the common websites') exportar_dados_pbdb = [] dados_separados = self.pegar_dados_arquivo() for i in dados_separados: if '00SHSTRx00' in i: i = i.replace(':00SHSTRx00', '') i = str(i) if self.formatos_copia_dadobruto.checkState(0): puxar_pbdb = verificar_ultimos_pb(credencial=i, dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_pbdb.append(puxar_pbdb) else: puxar_pbdb = verificar_ultimos_pb(credencial=i, 
dadobruto=False, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_pbdb.append(puxar_pbdb) if '00EMSTRx00' in i: i = i.replace(':00EMSTRx00', '') i = str(i) if self.formatos_copia_dadobruto.checkState(0): puxar_pbdb = verificar_ultimos_pb(credencial=i, dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_pbdb.append(puxar_pbdb) else: puxar_pbdb = verificar_ultimos_pb(credencial=i, dadobruto=False, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_pbdb.append(puxar_pbdb) if '00DMSTRx00' in i: i = i.replace(':00DMSTRx00', '') i = str(i) if self.formatos_copia_dadobruto.checkState(0): puxar_pbdb = verificar_ultimos_pb(credencial=i, dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_pbdb.append(puxar_pbdb) else: puxar_pbdb = verificar_ultimos_pb(credencial=i, dadobruto=False, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_pbdb.append(puxar_pbdb) #if self.modulos_modulos_dataleak_personalizada.checkState(0): # pass if self.modulos_modulos_dataleak_googledorks.checkState(0): exportar_dados_dorks = [] logando_main('info', 'Sweeping the google dorks') dados_separados = self.pegar_dados_arquivo() for i in dados_separados: if '00SHSTRx00' in i: i = i.replace(':00SHSTRx00', '') if self.formatos_copia_dadobruto.checkState(0): dados_dorks_separados = google_scrap(termo=i, dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_dorks.append(dados_dorks_separados) else: dados_dorks_separados = google_scrap(termo=i, dadobruto=False, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_dorks.append(dados_dorks_separados) elif '00EMSTRx00' in i: i = i.replace(':00EMSTRx00', '') if self.formatos_copia_dadobruto.checkState(0): dados_dorks_separados = google_scrap(termo=i, dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_dorks.append(dados_dorks_separados) else: dados_dorks_separados = google_scrap(termo=i, dadobruto=False, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_dorks.append(dados_dorks_separados) if '00DMSTRx00' in i: i = i.replace(':00DMSTRx00', '') if self.formatos_copia_dadobruto.checkState(0): dados_dorks_separados = google_scrap(termo=i, dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_dorks.append(dados_dorks_separados) else: dados_dorks_separados = google_scrap(termo=i, dadobruto=False, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) exportar_dados_dorks.append(dados_dorks_separados) if self.modulos_modulos_dataleak_wordlist.checkState(0): exportar_dados_wl = [] logando_main('info', 'Checking the word lists') dados_separados = self.pegar_dados_arquivo() for i in dados_separados: if '00SHSTRx00' in i: i = i.replace(':00SHSTRx00','') dados_wl_separados = verificar_dados(i) exportar_dados_wl += dados_wl_separados if self.modulos_modulos_dataleak_api.checkState(0): logando_main('info', 'Initializing the APIs requisitation') dados_separados = self.pegar_dados_arquivo() for i in dados_separados: if '00EMSTRx00' in i: i = i.replace(':00EMSTRx00','') # Have I Been Pwned/email if situacaoHaveIPwned == True: listaf_haveipwned = check_breach(i) for ex3 in listaf_haveipwned: if i in ex3: exportar_dados.append(ex3) # Scylla/email if situacaoScylla == True: temp_scyla = [] if self.formatos_copia_dadobruto.checkState(0): listaf_scylla = chamar_limpar_scylla(email=i, 
dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) for ex1 in listaf_scylla: if i in ex1: exportar_dados.append(ex1) temp_scyla.append(ex1) for item_lista in temp_scyla: list_temporaria = item_lista.split(':') dominio_vazado = list_temporaria[2] scylla_dado_bruto(dominio_vazado,gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) else: listaf_scylla = chamar_limpar_scylla(email=i) for ex1 in listaf_scylla: if i in ex1: exportar_dados.append(ex1) # Intelligencex/email if situacaoIntelx == True: if self.formatos_copia_dadobruto.checkState(0): listaf_inteligencex = intelligencex_sid_raw(i, conteudobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) for ex2 in listaf_inteligencex: if i in ex2: exportar_dados.append(ex2) else: listaf_inteligencex = intelligencex_sid_raw(i) for ex2 in listaf_inteligencex: if i in ex2: exportar_dados.append(ex2) elif '00SHSTRx00' in i: i = i.replace(':00SHSTRx00', '') # Have I Been Pwned/senha if situacaoHaveIPwned == True: listaf_haveipwned = check_breach(i) for ex3 in listaf_haveipwned: if i in ex3: exportar_dados.append(ex3) # Scylla/senha if situacaoScylla == True: temp_scyla = [] if self.formatos_copia_dadobruto.checkState(0): listaf_scylla = chamar_limpar_scylla(senha=i, dadobruto=True, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) for ex1 in listaf_scylla: if i in ex1: exportar_dados.append(ex1) temp_scyla.append(ex1) for item_lista in temp_scyla: list_temporaria = item_lista.split(':') dominio_vazado = list_temporaria[2] scylla_dado_bruto(dominio_vazado, gdr=str(self.formatos_gerar_email_dadobruto.checkState(0))) else: listaf_scylla = chamar_limpar_scylla(senha=i) for ex1 in listaf_scylla: if i in ex1: exportar_dados.append(ex1) elif '00DMSTRx00' in i: i = i.replace(':00DMSTRx00', '') # Intelligencex/email if situacaoIntelx == True: if self.formatos_copia_dadobruto.checkState(0): listaf_inteligencex = intelligencex_sid_raw(i, conteudobruto=True, gdr=str( self.formatos_gerar_email_dadobruto.checkState(0))) for ex2 in listaf_inteligencex: if i in ex2: exportar_dados.append(ex2) else: listaf_inteligencex = intelligencex_sid_raw(i) for ex2 in listaf_inteligencex: if i in ex2: exportar_dados.append(ex2) if self.modulos_modulos_vulscan.checkState(0): logando_main('info', 'Scanning the last CVEs') exportar_dados_cves = [] export_cve_t = iniciar_analise_total_cves() exportar_dados_cves += export_cve_t data_atual = datetime.date.today() definir = os.environ for valor, item in definir.items(): if valor == 'APPDATA': lista_item = item.split('\\') usuario = lista_item[2] documentos_f = f'C:/Users/{usuario}/Documents' if exportar_dados_cves != [] or exportar_dados_cves != None or exportar_dados_cves != ' ': with open(f'{documentos_f}/CVEs_results.txt', 'w') as resultado_cves: for e in exportar_dados_cves: resultado_cves.write(e) # Debug: results of scan/resultados das análises #print(f'API --> {exportar_dados}') #print(f'WORDLIST --> {exportar_dados_wl}') #print(f'CW Pastebin --> {exportar_dados_pbdb}') #print(f'Dorks --> {exportar_dados_dorks}') #print(f'NLTK --> {exportar_dados_nltk}') #print(f'CVEs --> {exportar_dados_cves}') if exportar_dados == 'None' or exportar_dados_dorks == 'None ' or exportar_dados_dorks == [ 'None'] or exportar_dados_dorks == [None]: exportar_dados = [] if exportar_dados_wl == 'None' or exportar_dados_dorks == 'None ' or exportar_dados_dorks == [ 'None'] or exportar_dados_dorks == [None]: exportar_dados_wl = [] if exportar_dados_pbdb == 'None' or exportar_dados_dorks == 'None 
' or exportar_dados_dorks == [ 'None'] or exportar_dados_dorks == [None]: exportar_dados_pbdb = [] if exportar_dados_dorks == 'None' or exportar_dados_dorks == 'None ' or exportar_dados_dorks == [ 'None'] or exportar_dados_dorks == [None]: exportar_dados_dorks = [] if exportar_dados_cves == 'None' or exportar_dados_dorks == 'None ' or exportar_dados_dorks == [ 'None'] or exportar_dados_dorks == [None]: exportar_dados_cves = [] if exportar_dados or exportar_dados_wl or exportar_dados_pbdb or exportar_dados_dorks: if exportar_dados: logando_main('info', 'Saving the results') gerar_resultados(nome='API_', conteudo=exportar_dados, gdrive=str(self.formatos_gerar_email_dadobruto.checkState(0)), txt_f=str(self.formatos_copia_txt.checkState(0)), txt_ocult=str(self.formatos_copia_txt_ocult.checkState(0)),txt_cript=str(self.formatos_copia_txt_cript.checkState(0)), docx_f=str(self.formatos_copia_docx.checkState(0)), docx_ocult=str(self.formatos_copia_docx_ocult.checkState(0)), docx_cript=str(self.formatos_copia_docx_cript.checkState(0)), pdf_f=str(self.formatos_copia_pdf.checkState(0)), pdf_ocult=str(self.formatos_copia_pdf_ocult.checkState(0)), pdf_cript=str(self.formatos_copia_pdf_cript.checkState(0)), xlsx_f=str(self.formatos_copia_xlsx.checkState(0)), xlsx_ocult=str(self.formatos_copia_xlsx_ocult.checkState(0)), xlsx_cript=str(self.formatos_copia_xlsx_cript.checkState(0)), json_f=str(self.formatos_copia_json.checkState(0)), json_ocult=str(self.formatos_copia_json_ocult.checkState(0)), json_cript=str(self.formatos_copia_json_cript.checkState(0)), html_f=str(self.formatos_copia_html.checkState(0)), html_ocult=str(self.formatos_copia_html_ocult.checkState(0)), html_cript=str(self.formatos_copia_html_cript.checkState(0)), xsl_f=str(self.formatos_copia_xls.checkState(0)), xsl_ocult=str(self.formatos_copia_xls_ocult.checkState(0)), xsl_cript=str(self.formatos_copia_xls_cript.checkState(0)), db_f=str(self.formatos_copia_db.checkState(0)), db_ocult=str(self.formatos_copia_db_ocult.checkState(0)), db_cript=str(self.formatos_copia_db_cript.checkState(0))) if exportar_dados_wl: logando_main('info', 'Saving the results') gerar_resultados(nome='WORDLIST_', conteudo=exportar_dados_wl, gdrive=str(self.formatos_gerar_email_dadobruto.checkState(0)), txt_f=str(self.formatos_copia_txt.checkState(0)), txt_ocult=str(self.formatos_copia_txt_ocult.checkState(0)),txt_cript=str(self.formatos_copia_txt_cript.checkState(0)), docx_f=str(self.formatos_copia_docx.checkState(0)), docx_ocult=str(self.formatos_copia_docx_ocult.checkState(0)), docx_cript=str(self.formatos_copia_docx_cript.checkState(0)), pdf_f=str(self.formatos_copia_pdf.checkState(0)), pdf_ocult=str(self.formatos_copia_pdf_ocult.checkState(0)), pdf_cript=str(self.formatos_copia_pdf_cript.checkState(0)), xlsx_f=str(self.formatos_copia_xlsx.checkState(0)), xlsx_ocult=str(self.formatos_copia_xlsx_ocult.checkState(0)), xlsx_cript=str(self.formatos_copia_xlsx_cript.checkState(0)), json_f=str(self.formatos_copia_json.checkState(0)), json_ocult=str(self.formatos_copia_json_ocult.checkState(0)), json_cript=str(self.formatos_copia_json_cript.checkState(0)), html_f=str(self.formatos_copia_html.checkState(0)), html_ocult=str(self.formatos_copia_html_ocult.checkState(0)), html_cript=str(self.formatos_copia_html_cript.checkState(0)), xsl_f=str(self.formatos_copia_xls.checkState(0)), xsl_ocult=str(self.formatos_copia_xls_ocult.checkState(0)), xsl_cript=str(self.formatos_copia_xls_cript.checkState(0)), db_f=str(self.formatos_copia_db.checkState(0)), 
db_ocult=str(self.formatos_copia_db_ocult.checkState(0)), db_cript=str(self.formatos_copia_db_cript.checkState(0))) if exportar_dados_pbdb: logando_main('info', 'Saving the results') gerar_resultados(nome='COMMONWEBSITES', conteudo=exportar_dados_pbdb, gdrive=str(self.formatos_gerar_email_dadobruto.checkState(0)), txt_f=str(self.formatos_copia_txt.checkState(0)), txt_ocult=str(self.formatos_copia_txt_ocult.checkState(0)),txt_cript=str(self.formatos_copia_txt_cript.checkState(0)), docx_f=str(self.formatos_copia_docx.checkState(0)), docx_ocult=str(self.formatos_copia_docx_ocult.checkState(0)), docx_cript=str(self.formatos_copia_docx_cript.checkState(0)), pdf_f=str(self.formatos_copia_pdf.checkState(0)), pdf_ocult=str(self.formatos_copia_pdf_ocult.checkState(0)), pdf_cript=str(self.formatos_copia_pdf_cript.checkState(0)), xlsx_f=str(self.formatos_copia_xlsx.checkState(0)), xlsx_ocult=str(self.formatos_copia_xlsx_ocult.checkState(0)), xlsx_cript=str(self.formatos_copia_xlsx_cript.checkState(0)), json_f=str(self.formatos_copia_json.checkState(0)), json_ocult=str(self.formatos_copia_json_ocult.checkState(0)), json_cript=str(self.formatos_copia_json_cript.checkState(0)), html_f=str(self.formatos_copia_html.checkState(0)), html_ocult=str(self.formatos_copia_html_ocult.checkState(0)), html_cript=str(self.formatos_copia_html_cript.checkState(0)), xsl_f=str(self.formatos_copia_xls.checkState(0)), xsl_ocult=str(self.formatos_copia_xls_ocult.checkState(0)), xsl_cript=str(self.formatos_copia_xls_cript.checkState(0)), db_f=str(self.formatos_copia_db.checkState(0)), db_ocult=str(self.formatos_copia_db_ocult.checkState(0)), db_cript=str(self.formatos_copia_db_cript.checkState(0))) if exportar_dados_dorks: logando_main('info', 'Saving the results') gerar_resultados(nome='DORKS', conteudo=exportar_dados_dorks, gdrive=str(self.formatos_gerar_email_dadobruto.checkState(0)), txt_f=str(self.formatos_copia_txt.checkState(0)), txt_ocult=str(self.formatos_copia_txt_ocult.checkState(0)),txt_cript=str(self.formatos_copia_txt_cript.checkState(0)), docx_f=str(self.formatos_copia_docx.checkState(0)), docx_ocult=str(self.formatos_copia_docx_ocult.checkState(0)), docx_cript=str(self.formatos_copia_docx_cript.checkState(0)), pdf_f=str(self.formatos_copia_pdf.checkState(0)), pdf_ocult=str(self.formatos_copia_pdf_ocult.checkState(0)), pdf_cript=str(self.formatos_copia_pdf_cript.checkState(0)), xlsx_f=str(self.formatos_copia_xlsx.checkState(0)), xlsx_ocult=str(self.formatos_copia_xlsx_ocult.checkState(0)), xlsx_cript=str(self.formatos_copia_xlsx_cript.checkState(0)), json_f=str(self.formatos_copia_json.checkState(0)), json_ocult=str(self.formatos_copia_json_ocult.checkState(0)), json_cript=str(self.formatos_copia_json_cript.checkState(0)), html_f=str(self.formatos_copia_html.checkState(0)), html_ocult=str(self.formatos_copia_html_ocult.checkState(0)), html_cript=str(self.formatos_copia_html_cript.checkState(0)), xsl_f=str(self.formatos_copia_xls.checkState(0)), xsl_ocult=str(self.formatos_copia_xls_ocult.checkState(0)), xsl_cript=str(self.formatos_copia_xls_cript.checkState(0)), db_f=str(self.formatos_copia_db.checkState(0)), db_ocult=str(self.formatos_copia_db_ocult.checkState(0)), db_cript=str(self.formatos_copia_db_cript.checkState(0))) with open(f'{path_central}/etc/parameters.txt') as pegar_conf_e: pegar_ce = pegar_conf_e.read() if 'email_notification:yes' in pegar_ce: if 'email_body:yes' in pegar_ce: expd = ('\n'.join(map(str, exportar_dados))) expd = expd.replace('\n', '<br>') expw = ('\n'.join(map(str, 
exportar_dados_wl))) expw = expw.replace('\n', '<br>') expp = ('\n'.join(map(str, exportar_dados_pbdb))) expp = expp.replace('\n', '<br>') expg = ('\n'.join(map(str, exportar_dados_dorks))) expg = expg.replace('\n', '<br>') corpo_email_dados = f'<strong>API results:<br></strong><br>{expd}<br><strong><br>Word Lists results:<br></strong><br>{expw}<br>' \ f'<strong><br>Common Websites results:<br></strong><br>{expp}<br><strong><br><br>Google Dorks results:<br></strong><br>{expg}<br>' try: enviar_email_oblivion(status_nosafe=True, data_nosafe=corpo_email_dados) except: pass else: try: enviar_email_oblivion() except: pass if 'telegram_notification:yes' in pegar_ce: if 'telegram_body:yes' in pegar_ce: expd = ('\n'.join(map(str, exportar_dados))) #expd = expd.replace('\n', '<br>') expw = ('\n'.join(map(str, exportar_dados_wl))) #expw = expw.replace('\n', '<br>') expp = ('\n'.join(map(str, exportar_dados_pbdb))) #expp = expp.replace('\n', '<br>') expg = ('\n'.join(map(str, exportar_dados_dorks))) #expg = expg.replace('\n', '<br>') corpo_telegram_dados = f'\n*API results:*\n\n{expd}\n\n*Word Lists results:*\n\n{expw}\n'\ f'\n*Common Websites results:*\n\n{expp}\n\n*Google Dorks results:*\n\n{expg}\n' try: notificar_telegram(status_nosafe=True, data_nosafe=corpo_telegram_dados) except: pass else: try: notificar_telegram() except: pass break if self.cancelar_analise: break if not situacao_loop: break self.cancelar_analise = False logando_main('info', 'Scan successful') self.analise_nova_analise.setDisabled(False) self.analise_salvar.setDisabled(True) if self.db_fechar.isChecked(): app.quit() if self.db_desligar.isChecked(): if os.system('shutdown /s') != 0: os.system('shutdown now') # os.system does not raise on failure, so check the return code for the Linux fallback def pegar_dados_arquivo(self): """ Collects data from database file/Coleta os dados do arquivo de banco de dados. """ db_connect = create_engine(f'sqlite:///{db_file}') id_items_f = [] id_items_limpo = [] conn = db_connect.connect() try: query = conn.execute("select email from data") for i in query.cursor.fetchall(): ii = str(i[0]) ii += ':00SHSTRx00' id_items_f.append(ii) except: pass try: query = conn.execute("select senha from data") for i in query.cursor.fetchall(): ii = str(i[0]) ii += ':00EMSTRx00' id_items_f.append(ii) except: pass try: query = conn.execute("select documento from data") for i in query.cursor.fetchall(): ii = str(i[0]) ii += ':00DMSTRx00' id_items_f.append(ii) except: pass for e in id_items_f: if e != '' and ' ' not in e and 'None:' not in e: id_items_limpo.append(e) return id_items_limpo class WindowConfigOblivion(QtWidgets.QMainWindow, Ui_MainWindow): """ Window of configuration/Janela de configuração.
""" def __init__(self): super().__init__() self.resize(701, 491) self.setWindowIcon(QtGui.QIcon(f'{path_central}/media/oblivion-256.png')) self.setMinimumSize(QSize(701, 491)) self.setMaximumSize(QSize(701, 491)) icon = QIcon() icon.addFile(u":/menu/media/oblivion-256.png", QSize(), QIcon.Normal, QIcon.Off) self.setWindowIcon(QtGui.QIcon('/media/oblivion-256.png')) self.setWindowIcon(icon) self.centralwidget = QWidget(self) self.centralwidget.setObjectName(u"centralwidget") self.frame = QFrame(self.centralwidget) self.frame.setObjectName(u"frame") self.frame.setGeometry(QRect(0, -10, 711, 501)) self.frame.setMinimumSize(QSize(681, 0)) self.frame.setStyleSheet(u"background-color: rgb(255, 255, 255);") self.frame.setFrameShape(QFrame.StyledPanel) self.frame.setFrameShadow(QFrame.Raised) self.frame_notificacao = QFrame(self.frame) self.frame_notificacao.setObjectName(u"frame_notificacao") self.frame_notificacao.setGeometry(QRect(101, 10, 601, 491)) self.frame_notificacao.setFrameShape(QFrame.StyledPanel) self.frame_notificacao.setFrameShadow(QFrame.Raised) self.groupBox_2 = QGroupBox(self.frame_notificacao) self.groupBox_2.setObjectName(u"groupBox_2") self.groupBox_2.setGeometry(QRect(11, 8, 577, 251)) self.plainTextEdit = QPlainTextEdit(self.groupBox_2) self.plainTextEdit.setObjectName(u"plainTextEdit") self.plainTextEdit.setGeometry(QRect(178, 20, 383, 71)) self.plainTextEdit.viewport().setProperty("cursor", QCursor(Qt.IBeamCursor)) self.verticalLayoutWidget_2 = QWidget(self.groupBox_2) self.verticalLayoutWidget_2.setObjectName(u"verticalLayoutWidget_2") self.verticalLayoutWidget_2.setGeometry(QRect(9, 29, 160, 61)) self.verticalLayout_2 = QVBoxLayout(self.verticalLayoutWidget_2) self.verticalLayout_2.setObjectName(u"verticalLayout_2") self.verticalLayout_2.setContentsMargins(0, 0, 0, 0) self.checkBox_2 = QCheckBox(self.verticalLayoutWidget_2) self.checkBox_2.setObjectName(u"checkBox_2") self.checkBox_2.setChecked(True) self.verticalLayout_2.addWidget(self.checkBox_2) self.checkBox_7 = QCheckBox(self.verticalLayoutWidget_2) self.checkBox_7.setObjectName(u"checkBox_7") self.checkBox_7.setChecked(False) self.verticalLayout_2.addWidget(self.checkBox_7) self.plainTextEdit_2 = QPlainTextEdit(self.groupBox_2) self.plainTextEdit_2.setObjectName(u"plainTextEdit_2") self.plainTextEdit_2.setGeometry(QRect(10, 100, 551, 141)) self.groupBox_4 = QGroupBox(self.frame_notificacao) self.groupBox_4.setObjectName(u"groupBox_4") self.groupBox_4.setGeometry(QRect(11, 270, 577, 181)) self.plainTextEdit_3 = QPlainTextEdit(self.groupBox_4) self.plainTextEdit_3.setObjectName(u"plainTextEdit_3") self.plainTextEdit_3.setGeometry(QRect(180, 20, 383, 51)) self.plainTextEdit_3.viewport().setProperty("cursor", QCursor(Qt.IBeamCursor)) self.verticalLayoutWidget_3 = QWidget(self.groupBox_4) self.verticalLayoutWidget_3.setObjectName(u"verticalLayoutWidget_3") self.verticalLayoutWidget_3.setGeometry(QRect(10, 20, 167, 55)) self.verticalLayout_3 = QVBoxLayout(self.verticalLayoutWidget_3) self.verticalLayout_3.setSpacing(6) self.verticalLayout_3.setObjectName(u"verticalLayout_3") self.verticalLayout_3.setContentsMargins(0, 0, 0, 0) self.checkBox_12 = QCheckBox(self.verticalLayoutWidget_3) self.checkBox_12.setObjectName(u"checkBox_12") self.checkBox_12.setChecked(True) self.verticalLayout_3.addWidget(self.checkBox_12) self.checkBox_8 = QCheckBox(self.verticalLayoutWidget_3) self.checkBox_8.setObjectName(u"checkBox_8") self.checkBox_8.setChecked(False) self.verticalLayout_3.addWidget(self.checkBox_8) self.plainTextEdit_4 = 
QPlainTextEdit(self.groupBox_4) self.plainTextEdit_4.setObjectName(u"plainTextEdit_4") self.plainTextEdit_4.setGeometry(QRect(10, 80, 553, 91)) self.pushButton_2 = QPushButton(self.frame_notificacao) self.pushButton_2.setObjectName(u"pushButton_2") self.pushButton_2.setGeometry(QRect(520, 456, 72, 26)) self.pushButton_2.setStyleSheet(u"background-color: None;") self.frame_apis = QFrame(self.frame) self.frame_apis.setObjectName(u"frame_apis") self.frame_apis.setGeometry(QRect(102, 10, 601, 491)) self.frame_apis.setFrameShape(QFrame.StyledPanel) self.frame_apis.setFrameShadow(QFrame.Raised) self.pushButton = QPushButton(self.frame_apis) self.pushButton.setObjectName(u"pushButton") self.pushButton.setGeometry(QRect(519, 456, 72, 26)) self.pushButton.setStyleSheet(u"background-color: None;") self.groupBox = QGroupBox(self.frame_apis) self.groupBox.setObjectName(u"groupBox") self.groupBox.setGeometry(QRect(10, 8, 577, 111)) self.formLayoutWidget_2 = QWidget(self.groupBox) self.formLayoutWidget_2.setObjectName(u"formLayoutWidget_2") self.formLayoutWidget_2.setGeometry(QRect(10, 20, 551, 81)) self.formLayout_2 = QFormLayout(self.formLayoutWidget_2) self.formLayout_2.setObjectName(u"formLayout_2") self.formLayout_2.setContentsMargins(0, 0, 0, 0) self.checkBox = QCheckBox(self.formLayoutWidget_2) self.checkBox.setObjectName(u"checkBox") self.checkBox.setChecked(True) self.formLayout_2.setWidget(0, QFormLayout.LabelRole, self.checkBox) self.lineEdit = QLineEdit(self.formLayoutWidget_2) self.lineEdit.setObjectName(u"lineEdit") self.formLayout_2.setWidget(0, QFormLayout.FieldRole, self.lineEdit) self.checkBox_3 = QCheckBox(self.formLayoutWidget_2) self.checkBox_3.setObjectName(u"checkBox_3") self.checkBox_3.setChecked(True) self.formLayout_2.setWidget(1, QFormLayout.LabelRole, self.checkBox_3) self.lineEdit_3 = QLineEdit(self.formLayoutWidget_2) self.lineEdit_3.setObjectName(u"lineEdit_3") self.formLayout_2.setWidget(1, QFormLayout.FieldRole, self.lineEdit_3) self.checkBox_4 = QCheckBox(self.formLayoutWidget_2) self.checkBox_4.setObjectName(u"checkBox_4") self.checkBox_4.setChecked(True) self.formLayout_2.setWidget(2, QFormLayout.LabelRole, self.checkBox_4) self.lineEdit_5 = QLineEdit(self.formLayoutWidget_2) self.lineEdit_5.setObjectName(u"lineEdit_5") self.lineEdit_5.setEnabled(False) self.formLayout_2.setWidget(2, QFormLayout.FieldRole, self.lineEdit_5) self.groupBox_3 = QGroupBox(self.frame_apis) self.groupBox_3.setObjectName(u"groupBox_3") self.groupBox_3.setGeometry(QRect(10, 124, 577, 51)) self.formLayoutWidget_3 = QWidget(self.groupBox_3) self.formLayoutWidget_3.setObjectName(u"formLayoutWidget_3") self.formLayoutWidget_3.setGeometry(QRect(10, 20, 551, 22)) self.formLayout_3 = QFormLayout(self.formLayoutWidget_3) self.formLayout_3.setObjectName(u"formLayout_3") self.formLayout_3.setContentsMargins(0, 0, 0, 0) self.checkBox_14 = QCheckBox(self.formLayoutWidget_3) self.checkBox_14.setObjectName(u"checkBox_14") self.checkBox_14.setChecked(True) self.formLayout_3.setWidget(0, QFormLayout.LabelRole, self.checkBox_14) self.lineEdit_10 = QLineEdit(self.formLayoutWidget_3) self.lineEdit_10.setObjectName(u"lineEdit_10") self.formLayout_3.setWidget(0, QFormLayout.FieldRole, self.lineEdit_10) self.widget = QWidget(self.frame) self.widget.setObjectName(u"widget") self.widget.setGeometry(QRect(0, 10, 101, 491)) self.widget.setStyleSheet(u"background-color: rgb(23, 44, 64);") self.verticalLayoutWidget = QWidget(self.widget) self.verticalLayoutWidget.setObjectName(u"verticalLayoutWidget") 
self.verticalLayoutWidget.setGeometry(QRect(0, 10, 101, 101)) self.verticalLayout = QVBoxLayout(self.verticalLayoutWidget) self.verticalLayout.setObjectName(u"verticalLayout") self.verticalLayout.setContentsMargins(0, 0, 0, 0) self.pushButton_4 = QPushButton(self.verticalLayoutWidget) self.pushButton_4.setObjectName(u"pushButton_4") font = QFont() font.setFamily(u"MS Shell Dlg 2") font.setBold(True) font.setItalic(False) font.setWeight(75) self.pushButton_4.setFont(font) self.pushButton_4.setCursor(QCursor(Qt.PointingHandCursor)) self.pushButton_4.setStyleSheet(u"background-color:none;\n" "color:white;\n" "border-style: outset;\n" "border-radius:10px;\n" "border-color:black;\n" "font:bold 12px;\n" "padding: 6px;\n" "min-width:10px;") self.verticalLayout.addWidget(self.pushButton_4) self.pushButton_5 = QPushButton(self.verticalLayoutWidget) self.pushButton_5.setObjectName(u"pushButton_5") font1 = QFont() font1.setBold(True) font1.setItalic(False) font1.setWeight(75) self.pushButton_5.setFont(font1) self.pushButton_5.setCursor(QCursor(Qt.PointingHandCursor)) self.pushButton_5.setStyleSheet(u"background-color:none;\n" "color:white;\n" "border-style: outset;\n" "border-radius:10px;\n" "border-color:black;\n" "font:bold 12px;\n" "padding: 6px;\n" "min-width:10px;") self.verticalLayout.addWidget(self.pushButton_5) self.pushButton_6 = QPushButton(self.verticalLayoutWidget) self.pushButton_6.setObjectName(u"pushButton_6") self.pushButton_6.setFont(font1) self.pushButton_6.setCursor(QCursor(Qt.PointingHandCursor)) self.pushButton_6.setStyleSheet(u"background-color:none;\n" "color:white;\n" "border-style: outset;\n" "border-radius:10px;\n" "border-color:black;\n" "font:bold 12px;\n" "padding: 6px;\n" "min-width:10px;") self.verticalLayout.addWidget(self.pushButton_6) #self.pushButton_7 = QPushButton(self.widget) #self.pushButton_7.setObjectName(u"pushButton_7") #self.pushButton_7.setGeometry(QRect(90, 210, 89, 26)) #self.pushButton_7.setFont(font1) #self.pushButton_7.setCursor(QCursor(Qt.PointingHandCursor)) #self.pushButton_7.setStyleSheet(u"background-color:none;\n" # "color:white;\n" # "border-style: outset;\n" # "border-radius:10px;\n" # "border-color:black;\n" # "font:bold 12px;\n" # "padding: 6px;\n" # "min-width:10px;") self.frame_autenticacao = QFrame(self.frame) self.frame_autenticacao.setObjectName(u"frame_autenticacao") self.frame_autenticacao.setGeometry(QRect(101, 10, 601, 491)) self.frame_autenticacao.setFrameShape(QFrame.StyledPanel) self.frame_autenticacao.setFrameShadow(QFrame.Raised) self.groupBox_5 = QGroupBox(self.frame_autenticacao) self.groupBox_5.setObjectName(u"groupBox_5") self.groupBox_5.setGeometry(QRect(11, 8, 577, 91)) self.formLayoutWidget = QWidget(self.groupBox_5) self.formLayoutWidget.setObjectName(u"formLayoutWidget") self.formLayoutWidget.setGeometry(QRect(49, 22, 201, 61)) self.formLayout = QFormLayout(self.formLayoutWidget) self.formLayout.setObjectName(u"formLayout") self.formLayout.setContentsMargins(0, 0, 0, 0) self.lineEdit_4 = QLineEdit(self.formLayoutWidget) self.lineEdit_4.setObjectName(u"lineEdit_4") self.formLayout.setWidget(0, QFormLayout.FieldRole, self.lineEdit_4) self.pushButton_8 = QPushButton(self.formLayoutWidget) self.pushButton_8.setObjectName(u"pushButton_8") self.pushButton_8.setStyleSheet(u"background-color: None;") self.formLayout.setWidget(1, QFormLayout.FieldRole, self.pushButton_8) self.label_3 = QLabel(self.groupBox_5) self.label_3.setObjectName(u"label_3") self.label_3.setGeometry(QRect(270, 12, 301, 61)) self.verticalLayoutWidget_5 = 
QWidget(self.groupBox_5) self.verticalLayoutWidget_5.setObjectName(u"verticalLayoutWidget_5") self.verticalLayoutWidget_5.setGeometry(QRect(10, 20, 43, 21)) self.verticalLayout_5 = QVBoxLayout(self.verticalLayoutWidget_5) self.verticalLayout_5.setObjectName(u"verticalLayout_5") self.verticalLayout_5.setContentsMargins(0, 0, 0, 0) self.label_4 = QLabel(self.verticalLayoutWidget_5) self.label_4.setObjectName(u"label_4") self.verticalLayout_5.addWidget(self.label_4) self.groupBox_6 = QGroupBox(self.frame_autenticacao) self.groupBox_6.setObjectName(u"groupBox_6") self.groupBox_6.setGeometry(QRect(10, 225, 577, 81)) self.formLayoutWidget_4 = QWidget(self.groupBox_6) self.formLayoutWidget_4.setObjectName(u"formLayoutWidget_4") self.formLayoutWidget_4.setGeometry(QRect(49, 19, 201, 51)) self.formLayout_4 = QFormLayout(self.formLayoutWidget_4) self.formLayout_4.setObjectName(u"formLayout_4") self.formLayout_4.setContentsMargins(0, 0, 0, 0) self.pushButton_9 = QPushButton(self.formLayoutWidget_4) self.pushButton_9.setObjectName(u"pushButton_9") self.pushButton_9.setStyleSheet(u"background-color: None;") self.formLayout_4.setWidget(1, QFormLayout.FieldRole, self.pushButton_9) self.lineEdit_7 = QLineEdit(self.formLayoutWidget_4) self.lineEdit_7.setObjectName(u"lineEdit_7") self.formLayout_4.setWidget(0, QFormLayout.FieldRole, self.lineEdit_7) self.label_6 = QLabel(self.groupBox_6) self.label_6.setObjectName(u"label_6") self.label_6.setGeometry(QRect(270, 14, 301, 51)) self.verticalLayoutWidget_6 = QWidget(self.groupBox_6) self.verticalLayoutWidget_6.setObjectName(u"verticalLayoutWidget_6") self.verticalLayoutWidget_6.setGeometry(QRect(10, 20, 41, 21)) self.verticalLayout_6 = QVBoxLayout(self.verticalLayoutWidget_6) self.verticalLayout_6.setObjectName(u"verticalLayout_6") self.verticalLayout_6.setContentsMargins(0, 0, 0, 0) self.label_5 = QLabel(self.verticalLayoutWidget_6) self.label_5.setObjectName(u"label_5") self.verticalLayout_6.addWidget(self.label_5) self.pushButton_3 = QPushButton(self.frame_autenticacao) self.pushButton_3.setObjectName(u"pushButton_3") self.pushButton_3.setGeometry(QRect(520, 456, 72, 26)) self.pushButton_3.setStyleSheet(u"background-color: None;") self.groupBox_7 = QGroupBox(self.frame_autenticacao) self.groupBox_7.setObjectName(u"groupBox_7") self.groupBox_7.setGeometry(QRect(10, 107, 577, 111)) self.formLayoutWidget_5 = QWidget(self.groupBox_7) self.formLayoutWidget_5.setObjectName(u"formLayoutWidget_5") self.formLayoutWidget_5.setGeometry(QRect(49, 22, 201, 81)) self.formLayout_5 = QFormLayout(self.formLayoutWidget_5) self.formLayout_5.setObjectName(u"formLayout_5") self.formLayout_5.setContentsMargins(0, 0, 0, 0) self.lineEdit_8 = QLineEdit(self.formLayoutWidget_5) self.lineEdit_8.setObjectName(u"lineEdit_8") self.formLayout_5.setWidget(0, QFormLayout.FieldRole, self.lineEdit_8) self.lineEdit_9 = QLineEdit(self.formLayoutWidget_5) self.lineEdit_9.setObjectName(u"lineEdit_9") self.formLayout_5.setWidget(1, QFormLayout.FieldRole, self.lineEdit_9) self.pushButton_10 = QPushButton(self.formLayoutWidget_5) self.pushButton_10.setObjectName(u"pushButton_10") self.pushButton_10.setStyleSheet(u"background-color: None;") self.formLayout_5.setWidget(2, QFormLayout.FieldRole, self.pushButton_10) self.label_10 = QLabel(self.groupBox_7) self.label_10.setObjectName(u"label_10") self.label_10.setGeometry(QRect(270, 34, 301, 41)) self.verticalLayoutWidget_4 = QWidget(self.groupBox_7) self.verticalLayoutWidget_4.setObjectName(u"verticalLayoutWidget_4") 
self.verticalLayoutWidget_4.setGeometry(QRect(10, 22, 41, 41)) self.verticalLayout_4 = QVBoxLayout(self.verticalLayoutWidget_4) self.verticalLayout_4.setSpacing(15) self.verticalLayout_4.setObjectName(u"verticalLayout_4") self.verticalLayout_4.setContentsMargins(0, 0, 0, 0) self.label_7 = QLabel(self.verticalLayoutWidget_4) self.label_7.setObjectName(u"label_7") self.verticalLayout_4.addWidget(self.label_7) self.label_8 = QLabel(self.verticalLayoutWidget_4) self.label_8.setObjectName(u"label_8") self.verticalLayout_4.addWidget(self.label_8) self.widget.raise_() self.frame_notificacao.raise_() self.frame_autenticacao.raise_() self.frame_apis.raise_() self.setCentralWidget(self.centralwidget) self.retranslateUi(self) self.pushButton_4.clicked.connect(self.frame_apis. raise_) self.pushButton_5.clicked.connect(self.frame_notificacao. raise_) self.pushButton_6.clicked.connect(self.frame_autenticacao. raise_) QMetaObject.connectSlotsByName(self) with open(f'{path_central}/etc/email/body.html', 'r') as pegar_html: corpo_do_html = str(pegar_html.read()) with open(f'{path_central}/etc/notification/message.txt', 'r') as pegar_mensagem: corpo_da_mensagem = str(pegar_mensagem.read()) self.plainTextEdit_2.setPlainText(corpo_do_html) self.plainTextEdit_4.setPlainText(corpo_da_mensagem) with open(f'{path_central}/etc/api/keys_db.txt', 'r') as pegar_keys_f: conteudo_keys_f = pegar_keys_f.read().split('\n') for e in conteudo_keys_f: if 'intelligencex_key' in e: e = e.split(' ') intelligencex_key_f = str(e[2].replace("'", '')) if 'haveibeenpwned_key' in e: e = e.split(' ') haveibeenpwned_key_f = str(e[2].replace("'", '')) if 'telegram_bot' in e: e = e.split(' ') telegram_bot_f = str(e[2].replace("'", '')) # setando keys self.lineEdit.setText(intelligencex_key_f) self.lineEdit_3.setText(haveibeenpwned_key_f) self.lineEdit_10.setText(telegram_bot_f) with open(f'{path_central}/etc/config', 'r') as aplicar_config: configs = aplicar_config.read() configs = str(configs) if 'intelx:yes' in configs: self.checkBox.setChecked(1) else: self.checkBox.setChecked(0) if 'haveipwned:yes' in configs: self.checkBox_3.setChecked(1) else: self.checkBox_3.setChecked(0) if 'scylla:yes' in configs: self.checkBox_4.setChecked(1) else: self.checkBox_4.setChecked(0) self.pushButton.clicked.connect(self.salvar_configs) self.pushButton_3.clicked.connect(self.salvar_configs) self.pushButton_2.clicked.connect(self.salvar_configs) with open(f'{path_central}/etc/parameters.txt', 'r') as aplicar_not: e_ap = aplicar_not.read() if 'email_notification:yes' in e_ap: self.checkBox_2.setChecked(1) else: self.checkBox_2.setChecked(0) if 'email_body:yes' in e_ap: self.checkBox_7.setChecked(1) else: self.checkBox_7.setChecked(0) if 'telegram_notification:yes' in e_ap: self.checkBox_12.setChecked(1) else: self.checkBox_12.setChecked(0) if 'telegram_body:yes' in e_ap: self.checkBox_8.setChecked(1) else: self.checkBox_8.setChecked(0) with open(f'{path_central}/etc/email/emails.txt', 'r') as lista_emails: aplicar_lista = lista_emails.read() self.plainTextEdit.setPlainText(aplicar_lista) with open(f'{path_central}/etc/notification/users.txt', 'r') as lista_ids: aplicar_ids = lista_ids.read() self.plainTextEdit_3.setPlainText(aplicar_ids) self.pushButton_8.clicked.connect(self.autenticar_id_gd) self.pushButton_10.clicked.connect(self.aplicar_gmail) self.pushButton_9.clicked.connect(self.aplicar_tbot) def aplicar_tbot(self): """ Authentication of Telegram Bot/Autenticação do Bot de Telegram. 
""" with open(f'{path_central}/etc/api/keys_db.txt', 'r') as aplicar_bk: conteudo_bk = aplicar_bk.read() conteudo_bk = conteudo_bk.split('\n') with open(f'{path_central}/etc/api/keys_db.txt', 'w') as novo_idb: for e in conteudo_bk: if not 'telegram_bot' in e and e != ' ' and e != '': e = str(e) novo_idb.write(f'{e}\n') if 'telegram_bot' in e and e != ' ' and e != '': ee = str(self.lineEdit_7.text()) ee = ee.replace(' ', '') novo_idb.write(f"telegram_bot = '{ee}'\n") def aplicar_gmail(self): """ Add new email to file/Adiciona novo e-mail no arquivo. """ with open(f'{path_central}/etc/email/parameters.txt', 'w') as aplicar_novo_p: v1 = (base64.b32encode(bytearray(f"{self.lineEdit_8.text()}", 'ascii')).decode('utf-8')) v2 = (base64.b32encode(bytearray(f"{self.lineEdit_9.text()}", 'ascii')).decode('utf-8')) aplicar_novo_p.write(f'id_1:{v1}\nid_2:{v2}') def autenticar_id_gd(self): """ Authentication of Google Drive/Autenticação do Google Drive. """ id_usuario = str(self.lineEdit_4.text()) autenticar_id(id_usuario, f'authentication.txt', 'text/plain', f'{path_central}/etc/api/googledrv/authentication.txt') with open(f'{path_central}/etc/api/googledrv/id_folder.txt', 'w') as novo_id: novo_id.write(id_usuario) def salvar_configs(self): """ Saves the alterations of the configuration window/Salva as alterações da página de configuração. """ k_intelx = f"intelligencex_key = '{self.lineEdit.text()}'" k_hibp = f"haveibeenpwned_key = '{self.lineEdit_3.text()}'" k_telegram = f"telegram_bot = '{self.lineEdit_10.text()}'" with open(f'{path_central}/etc/api/keys_db.txt', 'w') as aplicar_keys: aplicar_keys.write(f'{k_intelx}\n{k_hibp}\n{k_telegram}') with open(f'{path_central}/etc/config', 'w') as aplicar_checkbox: if self.checkBox.isChecked(): aplicar_checkbox.write('intelx:yes\n') else: aplicar_checkbox.write('intelx:no\n') if self.checkBox_3.isChecked(): aplicar_checkbox.write('haveipwned:yes\n') else: aplicar_checkbox.write('haveipwned:no\n') if self.checkBox_4.isChecked(): aplicar_checkbox.write('scylla:yes\n') else: aplicar_checkbox.write('scylla:no\n') with open(f'{path_central}/etc/parameters.txt', 'w') as setar_not: if self.checkBox_2.isChecked(): setar_not.write('email_notification:yes\n') else: setar_not.write('email_notification:no\n') if self.checkBox_7.isChecked(): setar_not.write('email_body:yes\n') else: setar_not.write('email_body:no\n') if self.checkBox_12.isChecked(): setar_not.write('telegram_notification:yes\n') else: setar_not.write('telegram_notification:no\n') if self.checkBox_8.isChecked(): setar_not.write('telegram_body:yes\n') else: setar_not.write('telegram_body:no\n') with open(f'{path_central}/etc/email/emails.txt', 'w') as escrever_emails: escrever_emails.write(self.plainTextEdit.toPlainText()) with open(f'{path_central}/etc/notification/users.txt', 'w') as escrever_ids: escrever_ids.write(self.plainTextEdit_3.toPlainText()) with open(f'{path_central}/etc/email/body.html', 'w') as escrever_html: escrever_html.write(self.plainTextEdit_2.toPlainText()) with open(f'{path_central}/etc/notification/message.txt', 'w') as escrever_mensagem: escrever_mensagem.write(self.plainTextEdit_4.toPlainText()) def retranslateUi(self, MainWindow): """ Applies the strings of the buttons/Aplica as strings dos botões. 
""" self.setWindowTitle(QCoreApplication.translate("MainWindow", u"Configuration", None)) self.groupBox_2.setTitle(QCoreApplication.translate("MainWindow", u"E-mail", None)) self.checkBox_2.setText(QCoreApplication.translate("MainWindow", u"Notify by e-mail", None)) self.checkBox_7.setText(QCoreApplication.translate("MainWindow", u"Send scan result by \n" "e-mail (not secure)", None)) self.groupBox_4.setTitle(QCoreApplication.translate("MainWindow", u"Telegram", None)) self.checkBox_12.setText(QCoreApplication.translate("MainWindow", u"Notify by telegram", None)) self.checkBox_8.setText(QCoreApplication.translate("MainWindow", u"Send scan result by \n" "Telegram (not secure)", None)) self.pushButton_2.setText(QCoreApplication.translate("MainWindow", u"Save", None)) self.pushButton.setText(QCoreApplication.translate("MainWindow", u"Save", None)) self.groupBox.setTitle(QCoreApplication.translate("MainWindow", u"Keys", None)) self.checkBox.setText(QCoreApplication.translate("MainWindow", u"Intelx", None)) self.lineEdit.setText(QCoreApplication.translate("MainWindow", u"", None)) self.checkBox_3.setText(QCoreApplication.translate("MainWindow", u"Have I Pwned", None)) self.lineEdit_3.setText(QCoreApplication.translate("MainWindow", u"", None)) self.checkBox_4.setText(QCoreApplication.translate("MainWindow", u"Scylla", None)) self.groupBox_3.setTitle(QCoreApplication.translate("MainWindow", u"Others", None)) self.checkBox_14.setText(QCoreApplication.translate("MainWindow", u"Telegram bot", None)) self.lineEdit_10.setText( QCoreApplication.translate("MainWindow", u"", None)) self.pushButton_4.setText(QCoreApplication.translate("MainWindow", u"APIs", None)) self.pushButton_5.setText(QCoreApplication.translate("MainWindow", u"Notification", None)) self.pushButton_6.setText(QCoreApplication.translate("MainWindow", u"Autentication", None)) #self.pushButton_7.setText(QCoreApplication.translate("MainWindow", u"Notification", None)) self.groupBox_5.setTitle(QCoreApplication.translate("MainWindow", u"Google Drive", None)) self.pushButton_8.setText(QCoreApplication.translate("MainWindow", u"Authenticate", None)) self.label_3.setText(QCoreApplication.translate("MainWindow", u"Before authenticate your Google Drive you need to activate\n" "the API of your Google Drive account and put the token in\n" "root folder of Oblivion.", None)) self.label_4.setText(QCoreApplication.translate("MainWindow", u"ID folder", None)) self.groupBox_6.setTitle(QCoreApplication.translate("MainWindow", u"Telegram", None)) self.pushButton_9.setText(QCoreApplication.translate("MainWindow", u"Authenticate", None)) self.label_6.setText(QCoreApplication.translate("MainWindow", u"Before authenticate your Telegram bot you need to send a\n" "mensage to bot.", None)) self.label_5.setText(QCoreApplication.translate("MainWindow", u"API bot", None)) self.pushButton_3.setText(QCoreApplication.translate("MainWindow", u"Save", None)) self.groupBox_7.setTitle(QCoreApplication.translate("MainWindow", u"G-Mail", None)) self.pushButton_10.setText(QCoreApplication.translate("MainWindow", u"Authenticate", None)) self.label_10.setText(QCoreApplication.translate("MainWindow", u"Before authenticate your g-mail account you need to allow\n" "devices third-hand in the settings of your g-mail.", None)) self.label_7.setText(QCoreApplication.translate("MainWindow", u"G-mail", None)) self.label_8.setText(QCoreApplication.translate("MainWindow", u"Pass", None)) class Window2(QtWidgets.QMainWindow, Ui_MainWindow): """ Window of send the 
passwords;e-mails;documents to database file/Janela de enviar credênciais para a database. """ def __init__(self): super().__init__() self.resize(701, 486) self.setMinimumSize(QSize(701, 0)) self.setMaximumSize(QSize(701, 16777215)) icon = QIcon() icon.addFile(u":/menu/media/oblivion-256.png", QSize(), QIcon.Normal, QIcon.Off) self.setWindowIcon(icon) self.centralwidget = QWidget(self) self.centralwidget.setObjectName(u"centralwidget") self.frame = QFrame(self.centralwidget) self.frame.setObjectName(u"frame") self.frame.setGeometry(QRect(0, -10, 711, 501)) self.frame.setMinimumSize(QSize(681, 0)) self.frame.setStyleSheet(u"background-color: rgb(255, 255, 255);") self.frame.setFrameShape(QFrame.StyledPanel) self.frame.setFrameShadow(QFrame.Raised) self.tableWidget = QTableWidget(self.frame) if (self.tableWidget.columnCount() < 3): self.tableWidget.setColumnCount(3) __qtablewidgetitem = QTableWidgetItem() self.tableWidget.setHorizontalHeaderItem(0, __qtablewidgetitem) __qtablewidgetitem1 = QTableWidgetItem() self.tableWidget.setHorizontalHeaderItem(1, __qtablewidgetitem1) __qtablewidgetitem2 = QTableWidgetItem() self.tableWidget.setHorizontalHeaderItem(2, __qtablewidgetitem2) if (self.tableWidget.rowCount() < 23): self.tableWidget.setRowCount(23) __qtablewidgetitem3 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(0, __qtablewidgetitem3) __qtablewidgetitem4 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(1, __qtablewidgetitem4) __qtablewidgetitem5 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(2, __qtablewidgetitem5) __qtablewidgetitem6 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(3, __qtablewidgetitem6) __qtablewidgetitem7 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(4, __qtablewidgetitem7) __qtablewidgetitem8 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(5, __qtablewidgetitem8) __qtablewidgetitem9 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(6, __qtablewidgetitem9) __qtablewidgetitem10 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(7, __qtablewidgetitem10) __qtablewidgetitem11 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(8, __qtablewidgetitem11) __qtablewidgetitem12 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(9, __qtablewidgetitem12) __qtablewidgetitem13 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(10, __qtablewidgetitem13) __qtablewidgetitem14 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(11, __qtablewidgetitem14) __qtablewidgetitem15 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(12, __qtablewidgetitem15) __qtablewidgetitem16 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(13, __qtablewidgetitem16) __qtablewidgetitem17 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(14, __qtablewidgetitem17) __qtablewidgetitem18 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(15, __qtablewidgetitem18) __qtablewidgetitem19 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(16, __qtablewidgetitem19) __qtablewidgetitem20 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(17, __qtablewidgetitem20) __qtablewidgetitem21 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(18, __qtablewidgetitem21) __qtablewidgetitem22 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(19, __qtablewidgetitem22) __qtablewidgetitem23 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(20, __qtablewidgetitem23) __qtablewidgetitem24 = QTableWidgetItem() 
self.tableWidget.setVerticalHeaderItem(21, __qtablewidgetitem24) __qtablewidgetitem25 = QTableWidgetItem() self.tableWidget.setVerticalHeaderItem(22, __qtablewidgetitem25) vl = self.db_data() xc = 0 yc = 0 zc = 0 try: for e in vl: if ':00EMSTRx00' in e: e = e.replace(':00EMSTRx00', '') self.tableWidget.setItem(yc, 0, QTableWidgetItem(e)) yc += 1 if ':00SHSTRx00' in e: e = e.replace(':00SHSTRx00', '') self.tableWidget.setItem(xc, 1, QTableWidgetItem(e)) xc += 1 if ':00DMSTRx00' in e: e = e.replace(':00DMSTRx00', '') self.tableWidget.setItem(zc, 2, QTableWidgetItem(e)) zc += 1 except: pass self.tableWidget.setObjectName(u"tableWidget") self.tableWidget.setGeometry(QRect(14, 49, 673, 432)) self.tableWidget.setStyleSheet(u"background-color: None;") self.pushButton = QPushButton(self.frame) self.pushButton.setObjectName(u"pushButton") self.pushButton.setGeometry(QRect(608, 20, 80, 22)) self.pushButton.setStyleSheet(u"background-color: none;") self.setCentralWidget(self.centralwidget) self.retranslateUi(self) QMetaObject.connectSlotsByName(self) self.pushButton.clicked.connect(self.salvar_db) def salvar_db(self): """ Sends the credentials to database file/Envia os dados para o arquivo database. """ try: if os.path.exists(db_file): os.remove(db_file) conn = sqlite3.connect(db_file) c = conn.cursor() c.execute('''CREATE TABLE data (email text, senha text, documento text)''') contar_x = int(self.tableWidget.rowCount()) # linha contar_y = int(self.tableWidget.columnCount()) # coluna temp_x = 0 temp_y = 0 total_contar = contar_x * contar_y tmp_lista_db = [] for e in range(total_contar): situacao = str(self.tableWidget.item(temp_x, temp_y)) if 'object' in situacao: dados_tabela = self.tableWidget.item(temp_x, temp_y).text() tmp_lista_db.append(dados_tabela) if situacao == 'None': tmp_lista_db.append('None') if temp_y == 2: if tmp_lista_db[0] == '': tmp_lista_db[0] = 'None' if tmp_lista_db[1] == '': tmp_lista_db[1] = 'None' if tmp_lista_db[2] == '': tmp_lista_db[2] = 'None' c.execute('''INSERT INTO data VALUES(?, ?, ?)''', (tmp_lista_db[0], tmp_lista_db[1], tmp_lista_db[2])) temp_y = -1 temp_x += 1 tmp_lista_db.clear() temp_y += 1 conn.commit() conn.close() except: pass def db_data(self): """ Collects data from database file/Coletando dados do arquivo de database. """ db_connect = create_engine(f'sqlite:///{db_file}') id_items2 = [] conn = db_connect.connect() try: query = conn.execute("select email from data") for i in query.cursor.fetchall(): ii = str(i[0]) ii += ':00EMSTRx00' id_items2.append(ii) except: logando_main('warning', 'It was not possible to load the e-mails') try: query = conn.execute("select senha from data") for i in query.cursor.fetchall(): ii = str(i[0]) ii += ':00SHSTRx00' id_items2.append(ii) except: logando_main('warning', 'It was not possible to load the passwords') try: query = conn.execute("select documento from data") for i in query.cursor.fetchall(): ii = str(i[0]) ii += ':00DMSTRx00' id_items2.append(ii) except: logando_main('warning', 'It was not possible to load the documents') arrumar_id_items2 = [] for ef in id_items2: if 'None:' in ef: ef = '' arrumar_id_items2.append(ef) arrumar_id_items2.append(ef) return arrumar_id_items2 def retranslateUi(self, MainWindow): """ Applies the strings of the buttons/Aplica as strings dos botões.
""" MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"Data", None)) ___qtablewidgetitem = self.tableWidget.horizontalHeaderItem(0) ___qtablewidgetitem.setText(QCoreApplication.translate("MainWindow", u"passwords", None)); ___qtablewidgetitem1 = self.tableWidget.horizontalHeaderItem(1) ___qtablewidgetitem1.setText(QCoreApplication.translate("MainWindow", u"e-mails", None)); ___qtablewidgetitem2 = self.tableWidget.horizontalHeaderItem(2) ___qtablewidgetitem2.setText(QCoreApplication.translate("MainWindow", u"documents", None)); ___qtablewidgetitem3 = self.tableWidget.verticalHeaderItem(0) ___qtablewidgetitem3.setText(QCoreApplication.translate("MainWindow", u"1", None)); ___qtablewidgetitem4 = self.tableWidget.verticalHeaderItem(1) ___qtablewidgetitem4.setText(QCoreApplication.translate("MainWindow", u"2", None)); ___qtablewidgetitem5 = self.tableWidget.verticalHeaderItem(2) ___qtablewidgetitem5.setText(QCoreApplication.translate("MainWindow", u"3", None)); ___qtablewidgetitem6 = self.tableWidget.verticalHeaderItem(3) ___qtablewidgetitem6.setText(QCoreApplication.translate("MainWindow", u"4", None)); ___qtablewidgetitem7 = self.tableWidget.verticalHeaderItem(4) ___qtablewidgetitem7.setText(QCoreApplication.translate("MainWindow", u"5", None)); ___qtablewidgetitem8 = self.tableWidget.verticalHeaderItem(5) ___qtablewidgetitem8.setText(QCoreApplication.translate("MainWindow", u"6", None)); ___qtablewidgetitem9 = self.tableWidget.verticalHeaderItem(6) ___qtablewidgetitem9.setText(QCoreApplication.translate("MainWindow", u"7", None)); ___qtablewidgetitem10 = self.tableWidget.verticalHeaderItem(7) ___qtablewidgetitem10.setText(QCoreApplication.translate("MainWindow", u"8", None)); ___qtablewidgetitem11 = self.tableWidget.verticalHeaderItem(8) ___qtablewidgetitem11.setText(QCoreApplication.translate("MainWindow", u"9", None)); ___qtablewidgetitem12 = self.tableWidget.verticalHeaderItem(9) ___qtablewidgetitem12.setText(QCoreApplication.translate("MainWindow", u"10", None)); ___qtablewidgetitem13 = self.tableWidget.verticalHeaderItem(10) ___qtablewidgetitem13.setText(QCoreApplication.translate("MainWindow", u"11", None)); ___qtablewidgetitem14 = self.tableWidget.verticalHeaderItem(11) ___qtablewidgetitem14.setText(QCoreApplication.translate("MainWindow", u"12", None)); ___qtablewidgetitem15 = self.tableWidget.verticalHeaderItem(12) ___qtablewidgetitem15.setText(QCoreApplication.translate("MainWindow", u"13", None)); ___qtablewidgetitem16 = self.tableWidget.verticalHeaderItem(13) ___qtablewidgetitem16.setText(QCoreApplication.translate("MainWindow", u"14", None)); ___qtablewidgetitem17 = self.tableWidget.verticalHeaderItem(14) ___qtablewidgetitem17.setText(QCoreApplication.translate("MainWindow", u"15", None)); ___qtablewidgetitem18 = self.tableWidget.verticalHeaderItem(15) ___qtablewidgetitem18.setText(QCoreApplication.translate("MainWindow", u"16", None)); ___qtablewidgetitem19 = self.tableWidget.verticalHeaderItem(16) ___qtablewidgetitem19.setText(QCoreApplication.translate("MainWindow", u"17", None)); ___qtablewidgetitem20 = self.tableWidget.verticalHeaderItem(17) ___qtablewidgetitem20.setText(QCoreApplication.translate("MainWindow", u"18", None)); ___qtablewidgetitem21 = self.tableWidget.verticalHeaderItem(18) ___qtablewidgetitem21.setText(QCoreApplication.translate("MainWindow", u"19", None)); ___qtablewidgetitem22 = self.tableWidget.verticalHeaderItem(19) ___qtablewidgetitem22.setText(QCoreApplication.translate("MainWindow", u"20", None)); ___qtablewidgetitem23 = 
self.tableWidget.verticalHeaderItem(20) ___qtablewidgetitem23.setText(QCoreApplication.translate("MainWindow", u"21", None)); ___qtablewidgetitem24 = self.tableWidget.verticalHeaderItem(21) ___qtablewidgetitem24.setText(QCoreApplication.translate("MainWindow", u"22", None)); ___qtablewidgetitem25 = self.tableWidget.verticalHeaderItem(22) ___qtablewidgetitem25.setText(QCoreApplication.translate("MainWindow", u"23", None)); __sortingEnabled = self.tableWidget.isSortingEnabled() self.tableWidget.setSortingEnabled(False) self.tableWidget.setSortingEnabled(__sortingEnabled) self.pushButton.setText(QCoreApplication.translate("MainWindow", u"Save", None)) if __name__ == "__main__": suppress_qt_warnings() app = QtWidgets.QApplication(sys.argv) window = MainWindow() window.show() sys.exit(app.exec_())
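# Illustrative sketch (not part of Oblivion itself): the windows above move data
# between a SQLite file and the Qt tables by tagging each value with a type
# suffix (':00EMSTRx00' = email, ':00SHSTRx00' = senha/password, ':00DMSTRx00' =
# documento) so mixed results can be routed back into the right column. Below is
# a minimal, self-contained version of that round-trip, using an in-memory
# database and a parameterized INSERT; all names here are made up for the demo.
import sqlite3

DEMO_MARKERS = [('email', ':00EMSTRx00'), ('senha', ':00SHSTRx00'), ('documento', ':00DMSTRx00')]

def demo_roundtrip():
    conn = sqlite3.connect(':memory:')
    c = conn.cursor()
    c.execute('CREATE TABLE data (email text, senha text, documento text)')
    # Parameterized placeholders avoid the quoting problems of f-string SQL.
    c.execute('INSERT INTO data VALUES(?, ?, ?)', ('a@b.com', 'hunter2', 'None'))
    conn.commit()
    tagged = []
    for column, marker in DEMO_MARKERS:
        # Column names come from the fixed list above, never from user input.
        for (value,) in c.execute('SELECT %s FROM data' % column):
            tagged.append(str(value) + marker)
    conn.close()
    # Downstream code can now dispatch each item on its suffix, as the table
    # fill loop does, dropping placeholder 'None' values along the way.
    return [t for t in tagged if not t.startswith('None')]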
batcher.py
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # Modifications Copyright 2017 Abigail See # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """This file contains code to process data into batches""" import queue as Queue from random import shuffle from threading import Thread import time import numpy as np import tensorflow as tf import data class Example(object): """Class representing a train/val/test example for text summarization.""" def __init__(self, article, abstract_sentences, vocab, hps): """Initializes the Example, performing tokenization and truncation to produce the encoder, decoder and target sequences, which are stored in self. Args: article: source text; a string. each token is separated by a single space. abstract_sentences: list of strings, one per abstract sentence. In each sentence, each token is separated by a single space. vocab: Vocabulary object hps: hyperparameters """ self.hps = hps # Get ids of special tokens start_decoding = vocab.word2id(data.START_DECODING) stop_decoding = vocab.word2id(data.STOP_DECODING) # Process the article article_words = article.split() if len(article_words) > hps.max_enc_steps: article_words = article_words[:hps.max_enc_steps] self.enc_len = len(article_words) # store the length after truncation but before padding self.enc_input = [vocab.word2id(w) for w in article_words] # list of word ids; OOVs are represented by the id for UNK token # Process the abstract abstract = ' '.join(abstract_sentences) # string abstract_words = abstract.split() # list of strings abs_ids = [vocab.word2id(w) for w in abstract_words] # list of word ids; OOVs are represented by the id for UNK token # Get the decoder input sequence and target sequence self.dec_input, self.target = self.get_dec_inp_targ_seqs(abs_ids, hps.max_dec_steps, start_decoding, stop_decoding) self.dec_len = len(self.dec_input) # If using pointer-generator mode, we need to store some extra info if hps.pointer_gen: # Store a version of the enc_input where in-article OOVs are represented by their temporary OOV id; also store the in-article OOV words themselves self.enc_input_extend_vocab, self.article_oovs = data.article2ids(article_words, vocab) # Get a version of the reference summary where in-article OOVs are represented by their temporary article OOV id abs_ids_extend_vocab = data.abstract2ids(abstract_words, vocab, self.article_oovs) # Overwrite decoder target sequence so it uses the temp article OOV ids _, self.target = self.get_dec_inp_targ_seqs(abs_ids_extend_vocab, hps.max_dec_steps, start_decoding, stop_decoding) # Store the original strings self.original_article = article self.original_abstract = abstract self.original_abstract_sents = abstract_sentences def get_dec_inp_targ_seqs(self, sequence, max_len, start_id, stop_id): """Given the reference summary as a sequence of tokens, return the input sequence for the decoder, and the target sequence which we will use to calculate loss.
The sequence will be truncated if it is longer than max_len. The input sequence must start with the start_id and the target sequence must end with the stop_id (but not if it's been truncated). Args: sequence: List of ids (integers) max_len: integer start_id: integer stop_id: integer Returns: inp: sequence length <=max_len starting with start_id target: sequence same length as input, ending with stop_id only if there was no truncation """ inp = [start_id] + sequence[:] target = sequence[:] if len(inp) > max_len: # truncate inp = inp[:max_len] target = target[:max_len] # no end_token else: # no truncation target.append(stop_id) # end token if len(inp) != len(target): raise ValueError("len(inp) != len(target)") return inp, target def pad_decoder_inp_targ(self, max_len, pad_id): """Pad decoder input and target sequences with pad_id up to max_len.""" while len(self.dec_input) < max_len: self.dec_input.append(pad_id) while len(self.target) < max_len: self.target.append(pad_id) def pad_encoder_input(self, max_len, pad_id): """Pad the encoder input sequence with pad_id up to max_len.""" while len(self.enc_input) < max_len: self.enc_input.append(pad_id) if self.hps.pointer_gen: while len(self.enc_input_extend_vocab) < max_len: self.enc_input_extend_vocab.append(pad_id) class Batch(object): """Class representing a minibatch of train/val/test examples for text summarization.""" def __init__(self, example_list, hps, vocab): """Turns the example_list into a Batch object. Args: example_list: List of Example objects hps: hyperparameters vocab: Vocabulary object """ self.pad_id = vocab.word2id(data.PAD_TOKEN) # id of the PAD token used to pad sequences self.init_encoder_seq(example_list, hps) # initialize the input to the encoder self.init_decoder_seq(example_list, hps) # initialize the input and targets for the decoder self.store_orig_strings(example_list) # store the original strings def init_encoder_seq(self, example_list, hps): """Initializes the following: self.enc_batch: numpy array of shape (batch_size, <=max_enc_steps) containing integer ids (all OOVs represented by UNK id), padded to length of longest sequence in the batch self.enc_lens: numpy array of shape (batch_size) containing integers. The (truncated) length of each encoder input sequence (pre-padding). self.enc_padding_mask: numpy array of shape (batch_size, <=max_enc_steps), containing 1s and 0s. 1s correspond to real tokens in enc_batch and target_batch; 0s correspond to padding. If hps.pointer_gen, additionally initializes the following: self.max_art_oovs: maximum number of in-article OOVs in the batch self.art_oovs: list of list of in-article OOVs (strings), for each example in the batch self.enc_batch_extend_vocab: Same as self.enc_batch, but in-article OOVs are represented by their temporary article OOV number. """ # Determine the maximum length of the encoder input sequence in this batch max_enc_seq_len = max([ex.enc_len for ex in example_list]) # Pad the encoder input sequences up to the length of the longest sequence for ex in example_list: ex.pad_encoder_input(max_enc_seq_len, self.pad_id) # Initialize the numpy arrays # Note: our enc_batch can have different length (second dimension) for each batch because we use dynamic_rnn for the encoder. 
self.enc_batch = np.zeros((hps.batch_size, max_enc_seq_len), dtype=np.int32) self.enc_lens = np.zeros((hps.batch_size), dtype=np.int32) self.enc_padding_mask = np.zeros((hps.batch_size, max_enc_seq_len), dtype=np.float32) # Fill in the numpy arrays for i, ex in enumerate(example_list): self.enc_batch[i, :] = ex.enc_input[:] self.enc_lens[i] = ex.enc_len for j in range(ex.enc_len): self.enc_padding_mask[i][j] = 1 # For pointer-generator mode, need to store some extra info if hps.pointer_gen: # Determine the max number of in-article OOVs in this batch self.max_art_oovs = max([len(ex.article_oovs) for ex in example_list]) # Store the in-article OOVs themselves self.art_oovs = [ex.article_oovs for ex in example_list] # Store the version of the enc_batch that uses the article OOV ids self.enc_batch_extend_vocab = np.zeros((hps.batch_size, max_enc_seq_len), dtype=np.int32) for i, ex in enumerate(example_list): self.enc_batch_extend_vocab[i, :] = ex.enc_input_extend_vocab[:] def init_decoder_seq(self, example_list, hps): """Initializes the following: self.dec_batch: numpy array of shape (batch_size, max_dec_steps), containing integer ids as input for the decoder, padded to max_dec_steps length. self.target_batch: numpy array of shape (batch_size, max_dec_steps), containing integer ids for the target sequence, padded to max_dec_steps length. self.dec_padding_mask: numpy array of shape (batch_size, max_dec_steps), containing 1s and 0s. 1s correspond to real tokens in dec_batch and target_batch; 0s correspond to padding. """ # Pad the inputs and targets for ex in example_list: ex.pad_decoder_inp_targ(hps.max_dec_steps, self.pad_id) # Initialize the numpy arrays. # Note: our decoder inputs and targets must be the same length for each batch (second dimension = max_dec_steps) because we do not use a dynamic_rnn for decoding. However I believe this is possible, or will soon be possible, with Tensorflow 1.0, in which case it may be best to upgrade to that. self.dec_batch = np.zeros((hps.batch_size, hps.max_dec_steps), dtype=np.int32) self.target_batch = np.zeros((hps.batch_size, hps.max_dec_steps), dtype=np.int32) self.dec_padding_mask = np.zeros((hps.batch_size, hps.max_dec_steps), dtype=np.float32) # Fill in the numpy arrays for i, ex in enumerate(example_list): self.dec_batch[i, :] = ex.dec_input[:] self.target_batch[i, :] = ex.target[:] for j in range(ex.dec_len): self.dec_padding_mask[i][j] = 1 def store_orig_strings(self, example_list): """Store the original article and abstract strings in the Batch object""" self.original_articles = [ex.original_article for ex in example_list] # list of lists self.original_abstracts = [ex.original_abstract for ex in example_list] # list of lists self.original_abstracts_sents = [ex.original_abstract_sents for ex in example_list] # list of list of lists class Batcher(object): """A class to generate minibatches of data. Buckets examples together based on length of the encoder sequence.""" BATCH_QUEUE_MAX = 100 # max number of batches the batch_queue can hold def __init__(self, data_path, vocab, hps, single_pass): """Initialize the batcher. Start threads that process the data into batches. Args: data_path: tf.Example filepattern. vocab: Vocabulary object hps: hyperparameters single_pass: If True, run through the dataset exactly once (useful for when you want to run evaluation on the dev or test set). Otherwise generate random batches indefinitely (useful for training). 
""" self._data_path = data_path self._vocab = vocab self._hps = hps self._single_pass = single_pass # Initialize a queue of Batches waiting to be used, and a queue of Examples waiting to be batched self._batch_queue = Queue.Queue(self.BATCH_QUEUE_MAX) self._example_queue = Queue.Queue(self.BATCH_QUEUE_MAX * self._hps.batch_size) # Different settings depending on whether we're in single_pass mode or not if single_pass: self._num_example_q_threads = 1 # just one thread, so we read through the dataset just once self._num_batch_q_threads = 1 # just one thread to batch examples self._bucketing_cache_size = 1 # only load one batch's worth of examples before bucketing; this essentially means no bucketing self._finished_reading = False # this will tell us when we're finished reading the dataset else: self._num_example_q_threads = 16 # num threads to fill example queue self._num_batch_q_threads = 4 # num threads to fill batch queue self._bucketing_cache_size = 100 # how many batches-worth of examples to load into cache before bucketing # Start the threads that load the queues self._example_q_threads = [] for _ in range(self._num_example_q_threads): self._example_q_threads.append(Thread(target=self.fill_example_queue)) self._example_q_threads[-1].daemon = True self._example_q_threads[-1].start() self._batch_q_threads = [] for _ in range(self._num_batch_q_threads): self._batch_q_threads.append(Thread(target=self.fill_batch_queue)) self._batch_q_threads[-1].daemon = True self._batch_q_threads[-1].start() # Start a thread that watches the other threads and restarts them if they're dead if not single_pass: # We don't want a watcher in single_pass mode because the threads shouldn't run forever self._watch_thread = Thread(target=self.watch_threads) self._watch_thread.daemon = True self._watch_thread.start() def next_batch(self): """Return a Batch from the batch queue. If mode='decode' then each batch contains a single example repeated beam_size-many times; this is necessary for beam search. Returns: batch: a Batch object, or None if we're in single_pass mode and we've exhausted the dataset. """ # If the batch queue is empty, print a warning if self._batch_queue.qsize() == 0: tf.logging.warning( 'Bucket input queue is empty when calling next_batch. Bucket queue size: %i, Input queue size: %i', self._batch_queue.qsize(), self._example_queue.qsize()) if self._single_pass and self._finished_reading: tf.logging.info("Finished reading dataset in single_pass mode.") return None batch = self._batch_queue.get() # get the next Batch return batch def fill_example_queue(self): """Reads data from file and processes into Examples which are then placed into the example queue.""" input_gen = self.text_generator(data.example_generator(self._data_path, self._single_pass)) while True: try: # (article, abstract) = next(input_gen) # read the next example from file. article and abstract are both strings. (article) = next(input_gen) # read the next example from file. article and abstract are both strings. except StopIteration: # if there are no more examples: tf.logging.info("The example generator for this example queue filling thread has exhausted data.") if self._single_pass: tf.logging.info( "single_pass mode is on, so we've finished reading dataset. 
This thread is stopping.") self._finished_reading = True break else: raise Exception("single_pass mode is off but the example generator is out of data; error.") # abstract_sentences = [sent.strip() for sent in data.abstract2sents(abstract)] # Use the <s> and </s> tags in abstract to get a list of sentences. example = Example(article, article, self._vocab, self._hps) # Process into an Example. self._example_queue.put(example) # place the Example in the example queue. def fill_batch_queue(self): """Takes Examples out of example queue, sorts them by encoder sequence length, processes into Batches and places them in the batch queue. In decode mode, makes batches that each contain a single example repeated. """ while True: if self._hps.mode != 'decode': # Get bucketing_cache_size-many batches of Examples into a list, then sort inputs = [] for _ in range(self._hps.batch_size * self._bucketing_cache_size): inputs.append(self._example_queue.get()) inputs = sorted(inputs, key=lambda inp: inp.enc_len) # sort by length of encoder sequence # Group the sorted Examples into batches, optionally shuffle the batches, and place in the batch queue. batches = [] for i in range(0, len(inputs), self._hps.batch_size): batches.append(inputs[i:i + self._hps.batch_size]) if not self._single_pass: shuffle(batches) for b in batches: # each b is a list of Example objects self._batch_queue.put(Batch(b, self._hps, self._vocab)) else: # beam search decode mode ex = self._example_queue.get() b = [ex for _ in range(self._hps.batch_size)] self._batch_queue.put(Batch(b, self._hps, self._vocab)) def watch_threads(self): """Watch example queue and batch queue threads and restart if dead.""" while True: time.sleep(60) for idx, t in enumerate(self._example_q_threads): if not t.is_alive(): # if the thread is dead tf.logging.error('Found example queue thread dead. Restarting.') new_t = Thread(target=self.fill_example_queue) self._example_q_threads[idx] = new_t new_t.daemon = True new_t.start() for idx, t in enumerate(self._batch_q_threads): if not t.is_alive(): # if the thread is dead tf.logging.error('Found batch queue thread dead. Restarting.') new_t = Thread(target=self.fill_batch_queue) self._batch_q_threads[idx] = new_t new_t.daemon = True new_t.start() def text_generator(self, example_generator): """Generates article and abstract text from tf.Example. Args: example_generator: a generator of tf.Examples from file. See data.example_generator""" while True: e = next(example_generator) # e is a tf.Example try: article_text = e.features.feature['article'].bytes_list.value[ 0].decode() # the article text was saved under the key 'article' in the data files # abstract_text = e.features.feature['abstract'].bytes_list.value[0].decode() # the abstract text was saved under the key 'abstract' in the data files except ValueError: tf.logging.error('Failed to get article or abstract from example') continue if len(article_text) == 0: # See https://github.com/abisee/pointer-generator/issues/1 tf.logging.warning('Found an example with empty article text. Skipping it.') else: # yield (article_text, abstract_text) yield (article_text)
task.py
"""task.py: Contains the main unit of execution in PyREM, the task.""" __author__ = "Ellis Michael" __email__ = "emichael@cs.washington.edu" __all__ = ['Task', 'SubprocessTask', 'RemoteTask', 'Parallel', 'Sequential'] import atexit import os import random import string import signal import sys from collections import defaultdict from enum import Enum from subprocess import Popen, PIPE from threading import RLock, Thread from traceback import format_exception from pyrem.utils import synchronized TaskStatus = Enum('TaskStatus', 'IDLE STARTED STOPPED') # pylint: disable=C0103 STARTED_TASKS = set() @atexit.register def cleanup(): """Stop all started tasks on system exit. Note: This only handles signals caught by the atexit module by default. SIGKILL, for instance, will not be caught, so cleanup is not guaranteed in all cases. """ to_stop = STARTED_TASKS.copy() if to_stop: print("Cleaning up...") for task in to_stop: try: task.stop() except: # pylint: disable=W0702 etype, value, trace = sys.exc_info() # Disregard no such process exceptions, print out the rest if not (isinstance(value, OSError) and value.errno == 3): print(''.join(format_exception(etype, value, trace, None))) continue def sigterm_handler(_sig, _frame): sys.exit(0) # Register a SIGTERM handler if one is not already registered. # By catching the signal atexit can do its thing in the case of SIGTERM. if not signal.getsignal(signal.SIGTERM): signal.signal(signal.SIGTERM, sigterm_handler) # TODO: create a wait_stopped() so that Tasks can be stopped in parallel class Task(object): """Abstract class, the main unit of execution in PyREM. If you would like to define your own type of ``Task``, you should at least implement the ``_start``, ``_wait``, ``_stop``, and ``_reset`` methods. Every task that gets started will be stopped on Python exit, as long as that exit can be caught by the ``atexit`` module (e.g. pressing `Ctrl+C` will be caught, but sending `SIGKILL` will not be caught). Attributes: return_values (dict): Subclasses of ``Task`` should store all of their results in this field and document what the possible return values are. """ def __init__(self): self._lock = RLock() self._status = TaskStatus.IDLE self.return_values = {} @synchronized def start(self, wait=False): """Start a task. This function depends on the underlying implementation of _start, which any subclass of ``Task`` should implement. Args: wait (bool): Whether or not to wait on the task to finish before returning from this function. Default `False`. Raises: RuntimeError: If the task has already been started without a subsequent call to ``reset()``. """ if self._status is not TaskStatus.IDLE: raise RuntimeError("Cannot start %s in state %s" % (self, self._status)) self._status = TaskStatus.STARTED STARTED_TASKS.add(self) self._start() if wait: self.wait() return self.return_values def _start(self): raise NotImplementedError @synchronized def wait(self): """Wait on a task to finish and stop it when it has finished. Raises: RuntimeError: If the task hasn't been started or has already been stopped. Returns: The ``return_values`` of the task. """ if self._status is not TaskStatus.STARTED: raise RuntimeError("Cannot wait on %s in state %s" % (self, self._status)) self._wait() self.stop() return self.return_values def _wait(self): pass @synchronized def stop(self): """Stop a task immediately. Raises: RuntimeError: If the task hasn't been started or has already been stopped. 
""" if self._status is TaskStatus.STOPPED: return if self._status is not TaskStatus.STARTED: raise RuntimeError("Cannot stop %s in state %s" % (self, self._status)) self._stop() STARTED_TASKS.remove(self) self._status = TaskStatus.STOPPED def _stop(self): pass @synchronized def reset(self): """Reset a task. Allows a task to be started again, clears the ``return_values``. Raises: RuntimeError: If the task has not been stopped. """ if self._status is not TaskStatus.STOPPED: raise RuntimeError("Cannot reset %s in state %s" % (self, self._status)) self._reset() self.return_values = {} self._status = TaskStatus.IDLE def _reset(self): pass def __repr__(self): # TODO: don't make the reprs so verbose, add a user defined name option? return "Task(status=%s, return_values=%s)" % ( self._status, self.return_values) # TODO: option for sending output to file class SubprocessTask(Task): """A task to run a command as a subprocess on the local host. This process will be killed when this task is stopped. The return code of the process will be stored in ``return_values[\'retcode\']``. Args: command (list of str): The command to execute. Each command-line argument and flag should be a separate element in the command list unless ``shell = True``. quiet (bool): If `True`, the output of this command is not printed. Default `False`. return_output (bool): If `True`, the output of this command will be saved in ``return_values[\'stdout\']`` and ``return_values[\'stderr\']`` when the subprocess is allowed to finish (i.e. when it is waited on instead of being stopped). Default `False`. **quiet** and **return_output** shouldn't both be true. shell (bool): If `True`, allocate a shell to execute the process. See: ``subprocess.Popen``. Default `False`. require_success (bool): If `True` and if this task is waited on instead of being stopped, raises a ``RuntimeError`` if the subprocess has a return code other than `0`. Default `False`. """ _DEVNULL = open(os.devnull, 'w') # pylint: disable=too-many-arguments def __init__(self, command, quiet=False, return_output=False, shell=False, require_success=False): super(SubprocessTask, self).__init__() assert isinstance(command, list) self._command = [str(c) for c in command] self._require_success = require_success self._popen_kwargs = {} self._popen_kwargs['stdin'] = self._DEVNULL if shell: self._popen_kwargs['shell'] = True self._command = ' '.join(self._command) if return_output: self._popen_kwargs['stdout'] = PIPE self._popen_kwargs['stderr'] = PIPE elif quiet: self._popen_kwargs['stdout'] = self._DEVNULL self._popen_kwargs['stderr'] = self._DEVNULL self._process = None def _start(self): self._process = Popen(self._command, **self._popen_kwargs) def _wait(self): # Wait for process to finish output = self._process.communicate() # Raise error if necessary retcode = self._process.returncode if self._require_success and retcode: raise RuntimeError("Return code should have been 0, was %s" % retcode) # Put return code and output in return_values self.return_values['stdout'] = output[0] self.return_values['stderr'] = output[1] self.return_values['retcode'] = retcode def _stop(self): if self._process.returncode is None: self._process.terminate() self._process.kill() def __repr__(self): return ("SubprocessTask(status=%s, return_values=%s, command=%s, " "popen_kwargs=%s)" % ( self._status, self.return_values, self._command, self._popen_kwargs)) # TODO: option for sending remote output to remote file class RemoteTask(SubprocessTask): """A task to run a command on a remote host over ssh. 
    Any processes started on the remote host will be killed when this task is
    stopped (unless `kill_remote=False` is specified).

    ``return_values['retcode']`` will contain the return code of the ssh
    command, which should currently be ignored.

    Attributes:
        host (str): The name of the host the task will run on.

    Args:
        host (str): The host to run on.
        command (list of str): The command to execute.
        quiet (bool): See ``SubprocessTask``.
        return_output (bool): See ``SubprocessTask``.
        kill_remote (bool): If `True`, all processes started on the remote
            server will be killed when this task is stopped.
        identity_file (str): Path to identity file passed to ssh. Default
            `None`.
    """

    def __init__(self, host, command, quiet=False, return_output=False,
                 kill_remote=True, identity_file=None):
        assert isinstance(command, list)

        self.host = host  # TODO: disallow changing this attribute

        # Expand the path to the identity file
        if identity_file:
            identity_file = os.path.expanduser(identity_file)
        self._identity_file = identity_file

        # Log the other args
        self._remote_command = list(command)
        self._quiet = quiet
        self._return_output = return_output
        self._kill_remote = kill_remote

        # If kill remote, add the PID logging script to the command
        if kill_remote:
            # Temp file holds the PIDs of processes started on remote host
            self._tmp_file_name = '/tmp/pyrem_procs-' + ''.join(
                random.SystemRandom().choice(
                    string.ascii_lowercase + string.digits)
                for _ in range(8))

            # TODO: Ending the user's command with ' & jobs ...' might not be
            #       safe. If the command ends in a &, for instance, this will
            #       just fail on the spot. Try to figure out a good way around
            #       this, but at least warn the user in RemoteTask's docstring
            # TODO: handle shells like zsh where the -p flag doesn't just
            #       print out the PIDs
            command.append(' & jobs -p >%s ; wait' % self._tmp_file_name)

        if identity_file:
            ssh_cmd = ['ssh', '-i', identity_file, host, ' '.join(command)]
        else:
            ssh_cmd = ['ssh', host, ' '.join(command)]

        super(RemoteTask, self).__init__(ssh_cmd, quiet=quiet,
                                         return_output=return_output,
                                         shell=False)

    # TODO: capture the return code of the remote command
    def _stop(self):
        # First, stop the ssh command
        super(RemoteTask, self)._stop()

        # Silence the kill_proc to prevent messages about already killed procs
        if self._kill_remote:
            kill_proc = Popen(
                ['ssh', self.host,
                 'kill -9 `cat %s` ; rm %s' % (self._tmp_file_name,
                                               self._tmp_file_name)],
                stdout=self._DEVNULL, stderr=self._DEVNULL,
                stdin=self._DEVNULL)
            kill_proc.wait()

    def __repr__(self):
        return ("RemoteTask(status=%s, return_values=%s, command=%s, "
                "popen_kwargs=%s)" % (
                    self._status, self.return_values, self._command,
                    self._popen_kwargs))


class Parallel(Task):
    """A task that executes several given tasks in parallel.

    Currently does not capture the return_values of the underlying tasks,
    this will be fixed in the future.

    Args:
        tasks (list of ``Task``): Tasks to execute.
        aggregate (bool): If `True`, will combine multiple RemoteTasks on the
            same host to use a single ssh session. Default `False`.

            Example: If you pass Parallel 6 tasks, 3 of which are running on
            host A and 3 of which are running on host B, aggregate will
            combine them into 2 tasks: one for host A and one for host B.

            Warning: The combined task will get its other options (``quiet``,
            ``kill_remote``, etc.) from one of the constituent commands. If
            you want to ensure the remote processes are killed, ensure that
            ``kill_remote`` is `True` for all processes on that host.
            If you rely on returned output for some of the commands, don't
            use aggregate (output will get mixed between all commands).
    """

    def __init__(self, tasks, aggregate=False):
        super(Parallel, self).__init__()
        self._tasks = tasks
        if aggregate:
            self._aggregate()

    def _aggregate(self):
        """Helper method to aggregate RemoteTasks into single ssh session."""
        # pylint: disable=W0212
        nonremote = [t for t in self._tasks if not isinstance(t, RemoteTask)]
        remote = [t for t in self._tasks if isinstance(t, RemoteTask)]

        host_dict = defaultdict(list)
        for task in remote:
            host_dict[task.host].append(task)

        aggregated = []
        for task_group in host_dict.values():
            # Build up combined command
            combined_cmd = []
            for task in task_group:
                if combined_cmd:
                    combined_cmd.append('&')
                combined_cmd.append(' '.join(task._remote_command))

            # Now, generate the aggregated task
            t0 = task_group[0]  # pylint: disable=C0103
            task = RemoteTask(
                t0.host, combined_cmd, t0._quiet, t0._return_output,
                t0._kill_remote, t0._identity_file)
            aggregated.append(task)

        self._tasks = nonremote + aggregated

    def _start(self):
        for task in self._tasks:
            task.start(wait=False)

    def _wait(self):
        # TODO: capture the return_values of the tasks
        for task in self._tasks:
            task.wait()

    def _stop(self):
        # TODO: this isn't quite right if there was an exception during _start
        #       there needs to be some way to kill only the tasks that were
        #       started
        for task in self._tasks:
            task.stop()

    def _reset(self):
        for task in self._tasks:
            task.reset()

    def __repr__(self):
        return "ParallelTask(status=%s, return_values=%s, tasks=%s)" % (
            self._status, self.return_values, self._tasks
        )


class Sequential(Task):
    """A task that executes several given tasks in sequence.

    Currently does not capture the return_values of the underlying tasks,
    this will be fixed in the future.

    Args:
        tasks (list of ``Task``): Tasks to execute.
    """

    def __init__(self, tasks):
        super(Sequential, self).__init__()
        assert isinstance(tasks, list)
        self._tasks = tasks
        self._exception = None

        def run_thread(tasks):
            try:
                for task in tasks:
                    task.start(wait=True)
            except:  # pylint: disable=W0702
                # Just record the exception and return, the main thread will
                # raise it
                self._exception = sys.exc_info()
                return

        self._thread = Thread(target=run_thread, args=(tasks,))

    def _start(self):
        self._thread.start()
        assert self._thread.is_alive()

    def _wait(self):
        # TODO: capture the return_values of the tasks
        self._thread.join()
        if self._exception:
            # https://portingguide.readthedocs.io/en/latest/exceptions.html#the-new-raise-syntax
            ex = self._exception[0](self._exception[1])
            ex.__traceback__ = self._exception[2]
            raise ex

    def _stop(self):
        # FIXME this isn't threadsafe at all, have to have a way to signal
        #       the executing thread to stop
        for task in self._tasks:
            # pylint: disable=W0212
            if task._status in [TaskStatus.STARTED, TaskStatus.STOPPED]:
                task.stop()
            else:
                return

    def _reset(self):
        for task in self._tasks:
            task.reset()

    def __repr__(self):
        return "SequentialTask(status=%s, return_values=%s, tasks=%s)" % (
            self._status, self.return_values, self._tasks
        )
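# ---------------------------------------------------------------------------
# Usage sketch (not part of task.py): one way to compose the classes above.
# The host name 'host-a' is a hypothetical placeholder; substitute a machine
# you can actually reach over ssh before running the remote portion.
if __name__ == '__main__':
    # Run a local command and capture its output.
    local = SubprocessTask(['echo', 'hello'], return_output=True)
    print(local.start(wait=True))  # {'stdout': ..., 'stderr': ..., 'retcode': 0}

    # Aggregate two commands on the same host into a single ssh session,
    # then follow the pair with a local cleanup step, run sequentially.
    remotes = Parallel([RemoteTask('host-a', ['uptime']),
                        RemoteTask('host-a', ['hostname'])], aggregate=True)
    pipeline = Sequential([remotes, SubprocessTask(['echo', 'done'])])
    pipeline.start(wait=True)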
main_backup.py
from dk_metric import image_metrics import os from multiprocessing import Process, Lock, Manager import numpy as np import time gt_folder = './180405/180405_Label' prop_folder = './180405/ALBU/train_merged/' output_csv = './scores.csv' radius = 3 files = os.listdir(prop_folder) lock = Lock() ALL_thresholds = [] ALL_precision, ALL_recall, ALL_F1, ALL_Jaccard, ALL_mod_prec, ALL_mod_recall, ALL_mod_F1 = [],[],[],[],[],[],[] Thread_Cnt = 16 manager = Manager() def cal_fp_tp(files, l, threshold): # sTP, sFP, sFN, msTP, msFP, msFN start_time = time.time() sTP, sFP, sFN, msTP, msFP, msFN = 0, 0, 0, 0, 0, 0 for i, f in enumerate(files): gt_path = os.path.join(gt_folder, f.replace('_row_img', '_label')) prop_path = os.path.join(prop_folder, f) if i != 0 and i % 200 == 0: print(os.getpid(), i, 'th file... use', time.time() - start_time, 'seconds.') TP, FP, FN = image_metrics.get_TP_FP_FN(gt_path, prop_path, threshold=threshold) mTP, mFP, mFN = image_metrics.get_mod_TP_FP_FN(gt_path, prop_path, radius=radius, threshold=threshold) sTP += TP sFP += FP sFN += FN msTP += mTP msFP += mFP msFN += mFN with lock: l[0] += sTP l[1] += sFP l[2] += sFN l[3] += msTP l[4] += msFP l[5] += msFN for step in range(10, 31): threshold = (0.025 * step + 0) ALL_thresholds.append(threshold) print('-------------', threshold, '-------------') threshold *= 255 l = manager.list([0, 0, 0, 0, 0, 0]) pool = [] files_threads = np.array_split(files, Thread_Cnt) for i in range(Thread_Cnt): pool.append(Process(target=cal_fp_tp, args=(files_threads[i].tolist(), l, threshold,))) for t in pool: t.start() for t in pool: t.join() sTP, sFP, sFN, msTP, msFP, msFN = list(l) Precision = sTP / (sTP + sFP) if (sTP + sFP != 0) else 1 Recall = sTP / (sTP + sFN) if(sTP + sFN != 0) else 1 Jaccard = 1 / (1/Precision + 1/Recall - 1) if (Precision > 0 and Recall > 0) else 0 F1 = 2 * Precision * Recall / (Precision + Recall) if (Precision > 0 and Recall > 0) else 0 ALL_precision.append(Precision) ALL_recall.append(Recall) ALL_Jaccard.append(Jaccard) ALL_F1.append(F1) mPrecision = msTP / (msTP + msFP) if (msTP + msFP != 0) else 1 mRecall = msTP / (msTP + msFN) if(msTP + msFN != 0) else 1 mF1 = 2 * mPrecision * mRecall / (mPrecision + mRecall) if (mPrecision > 0 and mRecall > 0) else 0 ALL_mod_prec.append(mPrecision) ALL_mod_recall.append(mRecall) ALL_mod_F1.append(mF1) with open(output_csv, 'w') as output: data_thre = 'Threshold,' + ','.join(['{:3f}'.format(v) for v in ALL_thresholds]) data_pre = 'Precision,' + ','.join(['{:3f}'.format(v) for v in ALL_precision]) data_rec = 'Recall,' + ','.join(['{:3f}'.format(v) for v in ALL_recall]) data_jac = 'Jaccard,' + ','.join(['{:3f}'.format(v) for v in ALL_Jaccard]) data_f1 = 'F1,' + ','.join(['{:3f}'.format(v) for v in ALL_F1]) data_mpre = 'Mod_Prec,' + ','.join(['{:3f}'.format(v) for v in ALL_mod_prec]) data_mrec = 'Mod_Rec,' + ','.join(['{:3f}'.format(v) for v in ALL_mod_recall]) data_mf1 = 'Mod_F1,' + ','.join(['{:3f}'.format(v) for v in ALL_mod_F1]) output.write('\n'.join([data_thre, data_pre, data_rec, data_jac, data_f1, data_mpre, data_mrec, data_mf1]))
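# ---------------------------------------------------------------------------
# Standalone sketch (not part of main_backup.py): the metric formulas used
# above, applied to made-up counts. Note the identity the script relies on:
# Jaccard = TP / (TP + FP + FN) = 1 / (1/Precision + 1/Recall - 1).
def prf_jaccard(tp, fp, fn):
    precision = tp / (tp + fp) if (tp + fp) != 0 else 1
    recall = tp / (tp + fn) if (tp + fn) != 0 else 1
    f1 = (2 * precision * recall / (precision + recall)
          if (precision > 0 and recall > 0) else 0)
    jaccard = (1 / (1 / precision + 1 / recall - 1)
               if (precision > 0 and recall > 0) else 0)
    return precision, recall, f1, jaccard


if __name__ == '__main__':
    # Hypothetical counts: 80 hits, 10 false alarms, 20 misses.
    p, r, f1, j = prf_jaccard(80, 10, 20)
    print(p, r, f1, j)  # ~0.889, 0.800, ~0.842, ~0.727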
chrome_test_server_spawner.py
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A "Test Server Spawner" that handles killing/stopping per-test test servers.

It's used to accept requests from the device to spawn and kill instances of the
chrome test server on the host.
"""

import BaseHTTPServer
import json
import logging
import os
import select
import struct
import subprocess
import threading
import time
import urlparse

import constants
from forwarder import Forwarder
import ports


# Paths that are needed to import necessary modules when running testserver.py.
os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + ':%s:%s:%s:%s' % (
    os.path.join(constants.CHROME_DIR, 'third_party'),
    os.path.join(constants.CHROME_DIR, 'third_party', 'tlslite'),
    os.path.join(constants.CHROME_DIR, 'third_party', 'pyftpdlib', 'src'),
    os.path.join(constants.CHROME_DIR, 'net', 'tools', 'testserver'))


SERVER_TYPES = {
    'http': '',
    'ftp': '-f',
    'sync': '--sync',
    'tcpecho': '--tcp-echo',
    'udpecho': '--udp-echo',
}


# The timeout (in seconds) of starting up the Python test server.
TEST_SERVER_STARTUP_TIMEOUT = 10


def _CheckPortStatus(port, expected_status):
  """Returns True if port has expected_status.

  Args:
    port: the port number.
    expected_status: boolean of expected status.

  Returns:
    Returns True if the status is expected. Otherwise returns False.
  """
  for timeout in range(1, 5):
    if ports.IsHostPortUsed(port) == expected_status:
      return True
    time.sleep(timeout)
  return False


def _GetServerTypeCommandLine(server_type):
  """Returns the command-line argument for the given server type.

  Args:
    server_type: the server type to be used (e.g. 'http').

  Returns:
    A string containing the command-line argument.
  """
  if server_type not in SERVER_TYPES:
    raise NotImplementedError('Unknown server type: %s' % server_type)
  if server_type == 'udpecho':
    raise Exception('Please do not run UDP echo tests because we do not have '
                    'a UDP forwarder tool.')
  return SERVER_TYPES[server_type]


class TestServerThread(threading.Thread):
  """A thread to run the test server in a separate process."""

  def __init__(self, ready_event, arguments, adb, tool, build_type):
    """Initialize TestServerThread with the following arguments.

    Args:
      ready_event: event which will be set when the test server is ready.
      arguments: dictionary of arguments to run the test server.
      adb: instance of AndroidCommands.
      tool: instance of runtime error detection tool.
      build_type: 'Release' or 'Debug'.
    """
    threading.Thread.__init__(self)
    self.wait_event = threading.Event()
    self.stop_flag = False
    self.ready_event = ready_event
    self.ready_event.clear()
    self.arguments = arguments
    self.adb = adb
    self.tool = tool
    self.process = None
    self.is_ready = False
    self.host_port = self.arguments['port']
    assert isinstance(self.host_port, int)
    self._test_server_forwarder = None
    # The forwarder device port now is dynamically allocated.
    self.forwarder_device_port = 0
    # Anonymous pipe in order to get port info from test server.
    self.pipe_in = None
    self.pipe_out = None
    self.command_line = []
    self.build_type = build_type

  def _WaitToStartAndGetPortFromTestServer(self):
    """Waits for the Python test server to start and gets the port it is using.

    The port information is passed by the Python test server with a pipe given
    by self.pipe_out. It is written as a result to |self.host_port|.

    Returns:
      Whether the port used by the test server was successfully fetched.
""" assert self.host_port == 0 and self.pipe_out and self.pipe_in (in_fds, _, _) = select.select([self.pipe_in, ], [], [], TEST_SERVER_STARTUP_TIMEOUT) if len(in_fds) == 0: logging.error('Failed to wait to the Python test server to be started.') return False # First read the data length as an unsigned 4-byte value. This # is _not_ using network byte ordering since the Python test server packs # size as native byte order and all Chromium platforms so far are # configured to use little-endian. # TODO(jnd): Change the Python test server and local_test_server_*.cc to # use a unified byte order (either big-endian or little-endian). data_length = os.read(self.pipe_in, struct.calcsize('=L')) if data_length: (data_length,) = struct.unpack('=L', data_length) assert data_length if not data_length: logging.error('Failed to get length of server data.') return False port_json = os.read(self.pipe_in, data_length) if not port_json: logging.error('Failed to get server data.') return False logging.info('Got port json data: %s', port_json) port_json = json.loads(port_json) if port_json.has_key('port') and isinstance(port_json['port'], int): self.host_port = port_json['port'] return _CheckPortStatus(self.host_port, True) logging.error('Failed to get port information from the server data.') return False def _GenerateCommandLineArguments(self): """Generates the command line to run the test server. Note that all options are processed by following the definitions in testserver.py. """ if self.command_line: return # The following arguments must exist. type_cmd = _GetServerTypeCommandLine(self.arguments['server-type']) if type_cmd: self.command_line.append(type_cmd) self.command_line.append('--port=%d' % self.host_port) # Use a pipe to get the port given by the instance of Python test server # if the test does not specify the port. if self.host_port == 0: (self.pipe_in, self.pipe_out) = os.pipe() self.command_line.append('--startup-pipe=%d' % self.pipe_out) self.command_line.append('--host=%s' % self.arguments['host']) data_dir = self.arguments['data-dir'] or 'chrome/test/data' if not os.path.isabs(data_dir): data_dir = os.path.join(constants.CHROME_DIR, data_dir) self.command_line.append('--data-dir=%s' % data_dir) # The following arguments are optional depending on the individual test. 
if self.arguments.has_key('log-to-console'): self.command_line.append('--log-to-console') if self.arguments.has_key('auth-token'): self.command_line.append('--auth-token=%s' % self.arguments['auth-token']) if self.arguments.has_key('https'): self.command_line.append('--https') if self.arguments.has_key('cert-and-key-file'): self.command_line.append('--cert-and-key-file=%s' % os.path.join( constants.CHROME_DIR, self.arguments['cert-and-key-file'])) if self.arguments.has_key('ocsp'): self.command_line.append('--ocsp=%s' % self.arguments['ocsp']) if self.arguments.has_key('https-record-resume'): self.command_line.append('--https-record-resume') if self.arguments.has_key('ssl-client-auth'): self.command_line.append('--ssl-client-auth') if self.arguments.has_key('tls-intolerant'): self.command_line.append('--tls-intolerant=%s' % self.arguments['tls-intolerant']) if self.arguments.has_key('ssl-client-ca'): for ca in self.arguments['ssl-client-ca']: self.command_line.append('--ssl-client-ca=%s' % os.path.join(constants.CHROME_DIR, ca)) if self.arguments.has_key('ssl-bulk-cipher'): for bulk_cipher in self.arguments['ssl-bulk-cipher']: self.command_line.append('--ssl-bulk-cipher=%s' % bulk_cipher) def run(self): logging.info('Start running the thread!') self.wait_event.clear() self._GenerateCommandLineArguments() command = [os.path.join(constants.CHROME_DIR, 'net', 'tools', 'testserver', 'testserver.py')] + self.command_line logging.info('Running: %s', command) self.process = subprocess.Popen(command) if self.process: if self.pipe_out: self.is_ready = self._WaitToStartAndGetPortFromTestServer() else: self.is_ready = _CheckPortStatus(self.host_port, True) if self.is_ready: self._test_server_forwarder = Forwarder( self.adb, [(0, self.host_port)], self.tool, '127.0.0.1', self.build_type) # Check whether the forwarder is ready on the device. self.is_ready = False device_port = self._test_server_forwarder.DevicePortForHostPort( self.host_port) if device_port: for timeout in range(1, 5): if ports.IsDevicePortUsed(self.adb, device_port, 'LISTEN'): self.is_ready = True self.forwarder_device_port = device_port break time.sleep(timeout) # Wake up the request handler thread. self.ready_event.set() # Keep thread running until Stop() gets called. while not self.stop_flag: time.sleep(1) if self.process.poll() is None: self.process.kill() if self._test_server_forwarder: self._test_server_forwarder.Close() self.process = None self.is_ready = False if self.pipe_out: os.close(self.pipe_in) os.close(self.pipe_out) self.pipe_in = None self.pipe_out = None logging.info('Test-server has died.') self.wait_event.set() def Stop(self): """Blocks until the loop has finished. Note that this must be called in another thread. """ if not self.process: return self.stop_flag = True self.wait_event.wait() class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): """A handler used to process http GET/POST request.""" def _SendResponse(self, response_code, response_reason, additional_headers, contents): """Generates a response sent to the client from the provided parameters. Args: response_code: number of the response status. response_reason: string of reason description of the response. additional_headers: dict of additional headers. Each key is the name of the header, each value is the content of the header. contents: string of the contents we want to send to client. 
""" self.send_response(response_code, response_reason) self.send_header('Content-Type', 'text/html') # Specify the content-length as without it the http(s) response will not # be completed properly (and the browser keeps expecting data). self.send_header('Content-Length', len(contents)) for header_name in additional_headers: self.send_header(header_name, additional_headers[header_name]) self.end_headers() self.wfile.write(contents) self.wfile.flush() def _StartTestServer(self): """Starts the test server thread.""" logging.info('Handling request to spawn a test server.') content_type = self.headers.getheader('content-type') if content_type != 'application/json': raise Exception('Bad content-type for start request.') content_length = self.headers.getheader('content-length') if not content_length: content_length = 0 try: content_length = int(content_length) except: raise Exception('Bad content-length for start request.') logging.info(content_length) test_server_argument_json = self.rfile.read(content_length) logging.info(test_server_argument_json) assert not self.server.test_server_instance ready_event = threading.Event() self.server.test_server_instance = TestServerThread( ready_event, json.loads(test_server_argument_json), self.server.adb, self.server.tool, self.server.build_type) self.server.test_server_instance.setDaemon(True) self.server.test_server_instance.start() ready_event.wait() if self.server.test_server_instance.is_ready: self._SendResponse(200, 'OK', {}, json.dumps( {'port': self.server.test_server_instance.forwarder_device_port, 'message': 'started'})) logging.info('Test server is running on port: %d.', self.server.test_server_instance.host_port) else: self.server.test_server_instance.Stop() self.server.test_server_instance = None self._SendResponse(500, 'Test Server Error.', {}, '') logging.info('Encounter problem during starting a test server.') def _KillTestServer(self): """Stops the test server instance.""" # There should only ever be one test server at a time. This may do the # wrong thing if we try and start multiple test servers. if not self.server.test_server_instance: return port = self.server.test_server_instance.host_port logging.info('Handling request to kill a test server on port: %d.', port) self.server.test_server_instance.Stop() # Make sure the status of test server is correct before sending response. if _CheckPortStatus(port, False): self._SendResponse(200, 'OK', {}, 'killed') logging.info('Test server on port %d is killed', port) else: self._SendResponse(500, 'Test Server Error.', {}, '') logging.info('Encounter problem during killing a test server.') self.server.test_server_instance = None def do_POST(self): parsed_path = urlparse.urlparse(self.path) action = parsed_path.path logging.info('Action for POST method is: %s.', action) if action == '/start': self._StartTestServer() else: self._SendResponse(400, 'Unknown request.', {}, '') logging.info('Encounter unknown request: %s.', action) def do_GET(self): parsed_path = urlparse.urlparse(self.path) action = parsed_path.path params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1) logging.info('Action for GET method is: %s.', action) for param in params: logging.info('%s=%s', param, params[param][0]) if action == '/kill': self._KillTestServer() elif action == '/ping': # The ping handler is used to check whether the spawner server is ready # to serve the requests. We don't need to test the status of the test # server when handling ping request. 
      self._SendResponse(200, 'OK', {}, 'ready')
      logging.info('Handled ping request and sent response.')
    else:
      self._SendResponse(400, 'Unknown request', {}, '')
      logging.info('Encountered unknown request: %s.', action)


class SpawningServer(object):
  """The class used to start/stop a http server."""

  def __init__(self, test_server_spawner_port, adb, tool, build_type):
    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
                                            SpawningServerRequestHandler)
    self.port = test_server_spawner_port
    self.server.adb = adb
    self.server.tool = tool
    self.server.test_server_instance = None
    self.server.build_type = build_type

  def _Listen(self):
    logging.info('Starting test server spawner')
    self.server.serve_forever()

  def Start(self):
    listener_thread = threading.Thread(target=self._Listen)
    listener_thread.setDaemon(True)
    listener_thread.start()
    time.sleep(1)

  def Stop(self):
    if self.server.test_server_instance:
      self.server.test_server_instance.Stop()
    self.server.shutdown()
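# ---------------------------------------------------------------------------
# Usage sketch (not part of chrome_test_server_spawner.py): how a host-side
# client would drive the spawner's HTTP interface. The port 8001 and the
# server arguments are hypothetical; /start expects a JSON POST body, while
# /ping and /kill are plain GETs (see do_POST/do_GET above).
import json
import urllib2

SPAWNER = 'http://127.0.0.1:8001'

# Wait until the spawner itself is up; the ping handler replies 'ready'.
assert urllib2.urlopen(SPAWNER + '/ping').read() == 'ready'

# Ask the spawner to launch an http test server on a dynamic port (port=0).
request = urllib2.Request(SPAWNER + '/start',
                          data=json.dumps({'server-type': 'http',
                                           'port': 0,
                                           'host': '127.0.0.1',
                                           'data-dir': ''}),
                          headers={'Content-Type': 'application/json'})
response = json.loads(urllib2.urlopen(request).read())
print 'test server reachable on device port', response['port']

# Tear the test server down again.
urllib2.urlopen(SPAWNER + '/kill').read()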
pipeline_utilities.py
#!/usr/bin/env python3 # coding: utf-8 """ Common source for utility functions used by ABCD-BIDS task-fmri-pipeline Greg Conan: gconan@umn.edu Created: 2021-01-15 Updated: 2021-11-12 """ # Import standard libraries import argparse from datetime import datetime # for seeing how long scripts take to run from glob import glob import json import multiprocessing as mp import os import pandas as pd import random # only used by rand_string import shutil import string # only used by rand_string import subprocess import sys import time # Constants: Name of scanner-info command-line argument, directory containing # the main pipeline script, SLURM-/SBATCH-related arguments' default names, and # name of the argument to get the directory containing the main wrapper script SCAN_ARG = 'scanners_info' SCRIPT_DIR = os.path.dirname(os.path.dirname(__file__)) SLURM_ARGS = ('account', 'cpus', 'memory', 'print_progress', 'sleep', 'time') WRAPPER_LOC = 'wrapper_location' def add_arg_if_in_arg_names(arg_name, all_args, parser, *shortnames, **kwargs): """ Wrapper for argparse.ArgumentParser.add_argument. Nearly identical, but will only add the argument to the parser if arg_name is in all_args. :param arg_name: String naming the argument to (maybe) add to parser :param all_args: Set of strings; each names a command-line argument :param parser: argparse.ArgumentParser :param shortnames: Unpacked list of strings; each is arg_name shortened :param kwargs: Unpacked dictionary of argparse attributes to give the arg :return: parser, but (maybe) with the argument named arg_name added """ if arg_name in all_args: cli_arg = as_cli_arg(arg_name) parser.add_argument( cli_arg[1:], cli_arg, *shortnames, **kwargs ) return parser def add_lvl_args_to(parser): """ :param parser: argparse.ArgumentParser with all command-line arguments that the user gave to pipeline_wrapper.py :return: parser with all command-line arguments needed for level X analysis """ # 1) Top-level directory with pipeline_wrapper.py 2) Run number 3) Path to # .json file which stores the 'paths' dictionary # parser.add_argument('--code-dir', type=valid_readable_dir, required=True) parser.add_argument('--run-number', type=valid_whole_number, required=True) parser.add_argument('--temp-json', type=valid_readable_json, required=True) return parser def add_slurm_args_to(parser): """ :param parser: argparse.ArgumentParser with some command-line arguments :return: parser with all CLI arguments needed to run parallel SLURM jobs """ default_CPUs = 1 default_gb_mem = 8 default_sleep = 10 default_time_limit = "01:00:00" parser.add_argument( '-A', '--account', help="Name of the account to submit the SBATCH job under." ) parser.add_argument( '-c', '--cpus', type=valid_whole_number, default=default_CPUs, help=('Number of CPUs to use for each Python job. By default, this ' 'argument\'s value will be {}.'.format(default_CPUs)) ) parser.add_argument( '-mem', '--memory', type=valid_whole_number, default=default_gb_mem, help=("Memory in gigabytes (GB) to assign to each sbatch job. The " "default number is {} GB.".format(default_gb_mem)) ) parser.add_argument( '-progress', '--print-progress', action='store_true', help=('Include this flag for the script to print updates about its ' 'progress at intervals defined by --sleep. This will also print ' 'every command that is run to submit a pipeline batch job.') ) parser.add_argument( '-sleep', '--sleep', type=valid_whole_number, default=default_sleep, help=("Number of seconds to wait between batch job submissions. 
The " "default number is {}.".format(default_sleep)) ) parser.add_argument( '-time', '--time', metavar="SLURM_JOB_TIME_LIMIT", type=valid_time_str, default=default_time_limit, help=("Time limit for each automated_subset_analysis batch job. The " "time limit must be formatted specifically as HH:MM:SS where HH " "is hours, MM is minutes, and SS is seconds. {} is the default " "time limit.".format(default_time_limit)) ) return parser def argify(argname, argval): """ :param argname: String naming a parameter for a script called from terminal :param argval: Object to assign in string form as the value of the argument :return: String, a parameter assignment for a script called from terminal """ return "--{}={}".format(argname, argval) def as_cli_arg(arg_str): """ :param arg_str: String naming a stored argument taken from the command line :return: String which is the command-line argument form of arg_str """ return "--" + arg_str.replace("_", "-") def copy_and_rename_file(old_file, new_file): """ Rename a file and copy it to a new location :param old_file: String, valid path to an existing file to copy :param new_file: String, valid path to what will be a copy of old_file """ os.rename(shutil.copy2(old_file, os.path.dirname(new_file)), new_file) def copy_event_files_to_default_dir(cli_args, all_event_files): """ Copy all event files into the default event files directory :param cli_args: Dictionary containing all command-line arguments from user :param all_event_files: List of strings that are valid paths to real files """ for each_EV_file in all_event_files: try: shutil.copy(each_EV_file, cli_args['events_dir']) except shutil.SameFileError: pass def count_lines_in_txt_file(filepath): """ Quickly count how many lines are in a text file. Taken from pynative.com/python-count-number-of-lines-in-file :param filepath: String, valid path to an existing readable text file :return: Int, the number of lines in the file at filepath """ with open(filepath, 'r') as infile: # open file in read mode for count, _ in enumerate(infile): pass return count + 1 def dict_has(a_dict, a_key): """ :param a_dict: Dictionary (any) :param a_key: Object (any) :return: True if and only if a_key is mapped to something truthy in a_dict """ return a_key in a_dict and a_dict[a_key] def ensure_dict_has(a_dict, a_key, new_value): """ :param a_dict: Dictionary (any) :param a_key: Object which will be a key in a_dict :param new_value: Object to become the value mapped to a_key in a_dict unless a_key is already mapped to a value :return: a_dict, but with a_key mapped to some value """ if not dict_has(a_dict, a_key): a_dict[a_key] = new_value return a_dict def exit_with_time_info(start_time, exit_code=0): """ Terminate the pipeline after displaying a message showing how long it ran :param start_time: datetime.datetime object of when the script started """ print('The pipeline for this subject took this long to run {}: {}' .format('successfully' if exit_code == 0 else 'and then crashed', datetime.now() - start_time)) sys.exit(exit_code) def extract_from_json(json_path): """ :param json_path: String, a valid path to a real readable .json file :return: Dictionary, the contents of the file at json_path """ with open(json_path, 'r') as infile: return json.load(infile) def get_all_analysis_paths(cli_args): """ Build and save paths for various variables called throughout the pipeline :param cli_args: Dictionary containing all command-line arguments from user :return: Dictionary containing paths to all of the following variables: AROI2, BIDS, 
dir_lvl, feat_name, final_smooth, lvl_2_paths, sub_paths, templates """ paths = {'dir_lvl': {str(lvl): os.path.join( # Feature dirs for all levels cli_args['output'], 'Level{}_feats'.format(lvl) ) for lvl in (1, 2)}, 'feat_name': '{}.feat'.format(cli_args['study_name']), 'final_smooth': ('_smoothed_{}mm' # Spatial smoothing variable .format(cli_args['spat_smooth']))} for lvl in cli_args['levels']: tmpllv = 'template{}'.format(lvl) paths[tmpllv] = os.path.join(cli_args['templates'], cli_args[tmpllv]) paths['lvl_2'] = get_lvl_paths( paths['dir_lvl']['2'], get_sub_base(cli_args), cli_args['study_name'] + '.gfeat', cli_args['runs'], 'fsf' ) paths['sub_ses'] = {f_or_a: os.path.join( # Subject anat & func directories cli_args['study_dir'], 'derivatives', cli_args['bids_dir'], cli_args['subject'], cli_args['ses'], f_or_a ) for f_or_a in ('anat', 'func')} paths['AROI2'] = os.path.join(cli_args['templates'], 'Atlas_ROIs.2.nii.gz') return paths def get_and_print_time_since(event_name, event_time): """ Print and return a string showing how much time has passed since the current running script reached a certain part of its process :param event_name: String to print after 'Time elapsed since ' :param event_time: datetime object representing a time in the past :return: String with an easily human-readable message showing how much time has passed since {event_time} when {event_name} happened. """ timestamp = ("\nTime elapsed since {}: {}" .format(event_name, datetime.now() - event_time)) print(timestamp) return timestamp def get_args_to_run_film_gls(**kwargs): """ :return: List of strings which are a Bash command calling film_gls """ in_arg = kwargs.pop('in_arg') to_call = ['film_gls', '--sa', argify('in', in_arg)] for argname, argval in kwargs.items(): to_call.append(argify(argname, argval)) return to_call def get_default_ext_command(cmd_name): """ Try to get valid path to external software command file without user input :param cmd_name: String naming the executable command file :return: String, path to the command if the user has the command alias in their .bashrc / $PATH; otherwise None """ try: # If the command path is already defined, then use it cmd = subprocess.check_output(("which", cmd_name) ).decode('utf-8').split()[-1] except subprocess.CalledProcessError: cmd = None return cmd def get_LR_functions(cli_args, paths): """ :param cli_args: Dictionary containing all command-line arguments from user :param paths: Dictionary of path strings, and of dictionaries of path strings, used throughout processing in both levels :return: Dictionary mapping 'surf' to a function which returns the file path string to a .surf.gii file, and mapping 'shape' to a function which returns the file path string to a .shape.gii file """ return {'surf': lambda x: os.path.join( paths['sub_ses']['anat'], get_subj_ses(cli_args) + '_hemi-{}_space-MNI_mesh-fsLR32k_midthickness.surf.gii'.format(x) ), 'shape': lambda y: os.path.join( cli_args['templates'], y + '.atlasroi.32k_fs_LR.shape.gii' )} def get_lvl_paths(lvl_dir, sub_base, feat_name, runs, *extra_subdirs): """ Get a dictionary of paths to analysis-level-specific files for paths dict :param lvl_dir: String, path to the feat directory for level 1 or 2 :param sub_base: String identifying a subject, session, and task :param feat_name: String naming a feature :param runs: List of strings or integers, each identifying a run :param extra_subdirs: Unpacked list of strings naming subdirectories of the level parent directory :return: Dictionary mapping string keys to string paths 
""" lvl_paths = {'parent': os.path.join(lvl_dir, sub_base + '_' + feat_name)} for run in runs: lvl_paths[run] = os.path.join(lvl_paths['parent'], 'level1_run-{}'.format(run)) for subdr in extra_subdirs: lvl_paths[subdr] = os.path.join(lvl_paths['parent'], subdr + '_files') return lvl_paths def get_main_pipeline_arg_names(): """ :return: Set containing strings naming all command-line arguments included by default in the main script, pipeline_wrapper.py """ return {'bids_dir', 'censor', 'events_dir', 'fd', 'filter', 'fsl_dir', 'keep_all', 'levels', 'no_parallel', 'output', 'runs', 'ses', 'spat_smooth', 'subject', 'surf_smooth', 'study_dir', 'study_name', 'task', 'temp_dir', 'templates', 'template1', 'template2', 'vol_smooth', 'wb_command', WRAPPER_LOC} def get_optional_cli_args(cli_args, drop_slurm=False): """ :param cli_args: Dictionary with all validated command-line arguments, all of which are used by this function :param drop_slurm: True to exclude SLURM arguments; else False :return: List of most cli_args optional arguments and their values """ optional_args = list() for arg in cli_args.keys(): if cli_args[arg] and not (drop_slurm and arg in SLURM_ARGS): optional_args.append(as_cli_arg(arg)) if isinstance(cli_args[arg], list): for el in cli_args[arg]: optional_args.append(str(el)) elif not isinstance(cli_args[arg], bool): optional_args.append(str(cli_args[arg])) return optional_args def get_pipeline_cli_argparser(arg_names=get_main_pipeline_arg_names()): """ :param arg_names: Set containing strings naming all command-line arguments :return: argparse.ArgumentParser with all command-line arguments needed to run pipeline_wrapper.py """ # Default values for user input arguments default_BIDS_dir = 'abcd-hcp-pipeline' default_censor_num = 0 # 2 default_fd = 0.9 default_smooth = 0 default_study_name = 'ABCD' default_runs_lvls = [1, 2] default_temporal_filter = 100 default_wb_command = get_default_ext_command('wb_command') generic_dtseries_path = os.path.join( '(--study-dir)', 'derivatives', '(--bids-dir)', '(--subject)', '(--ses)', 'func', 'sub-(--subject)_ses-(--ses)_task-(--task)_' 'run-(--runs)_bold_timeseries.dtseries.nii' ) generic_output_dirpath = os.path.join('(--study-dir)', 'derivatives', 'abcd-bids-tfmri-pipeline', '(--subject)', '(--ses)') # Strings used in multiple help messages msg_default = ' By default, this argument\'s value(s) will be {}.' msg_pipeline = 'Name of the {} that you are running the pipeline on.' msg_smooth = ('Millimeters of {} smoothing that has already been applied ' 'in the minimal processing steps.') msg_template = 'Name (not full path) of the Level {} .fsf template file.' msg_whole_num = ' This argument must be a positive integer.' # Create parser with command-line arguments from user parser = argparse.ArgumentParser(description=( 'ABCD fMRI Task Prep pipeline. Inputs must be in the same format ' 'as ABCD-HCP-Pipeline outputs after running filemap.' )) parser = add_arg_if_in_arg_names('bids_dir', arg_names, parser, metavar='NAME_OF_BIDS_DERIVATIVES_PIPELINE_DIRECTORY', default=default_BIDS_dir, help=('Name of the BIDS-standard file-mapped directory with subject ' 'data in the "derivatives" subdirectory of your --study-dir. 
              ' 'This path should be valid: ' + generic_dtseries_path +
              msg_default.format(default_BIDS_dir))
    )
    # Specify how many initial frames/volumes to censor
    parser = add_arg_if_in_arg_names('censor', arg_names, parser,
        metavar='INITIAL_NUMBER_OF_TIMEPOINTS_TO_CENSOR',
        default=default_censor_num, type=valid_whole_number,
        help=('The number of initial frames/volumes to censor.' +
              msg_whole_num + msg_default.format(default_censor_num))
    )
    parser = add_arg_if_in_arg_names('events_dir', arg_names, parser,
        metavar='EVENT_FILES_DIRECTORY', type=valid_readable_dir,
        help='Valid path to a real directory containing event .tsv files.'
    )
    # Specify framewise displacement threshold to censor volumes with high
    # motion
    parser = add_arg_if_in_arg_names('fd', arg_names, parser,
        metavar='FRAMEWISE_DISPLACEMENT_THRESHOLD', default=default_fd,
        type=valid_float_0_to_1,
        help=('The framewise displacement threshold for censoring volumes '
              'with high motion. This must be a decimal between 0 and 1.{}'
              .format(msg_default.format(default_fd)))
    )
    # High pass temporal filter cutoff number value
    parser = add_arg_if_in_arg_names('filter', arg_names, parser,
        metavar='HIGH_PASS_TEMPORAL_FILTER_CUTOFF',
        default=default_temporal_filter, type=valid_whole_number,
        help=('High pass filter cutoff (in seconds).{}{}'.format(
            msg_whole_num, msg_default.format(default_temporal_filter)
        ))
    )
    parser = add_arg_if_in_arg_names('fsl_dir', arg_names, parser,
        '-fsl', '--fsl', dest='fsl_dir', type=valid_readable_dir,
        help=('Valid path to an existing directory containing the executable '
              'files fsl, fslmerge, fslmaths, flameo, and feat_model from '
              'the FMRIB Software Library (FSL).')
    )
    parser = add_arg_if_in_arg_names('keep_all', arg_names, parser,
        action='store_true',
        help=('Include this flag to keep all files generated during the '
              'pipeline. By default, the pipeline will only keep dtseries, '
              'dof, log, and event files.')
    )
    # Which analysis levels to run
    parser = add_arg_if_in_arg_names('levels', arg_names, parser,
        metavar='ANALYSIS_LEVELS_TO_RUN', nargs='*',
        choices=default_runs_lvls, type=valid_whole_number,
        help=('Levels to conduct the analysis on: {0} for one run, and/or '
              '{1} to merge multiple runs.'.format(*default_runs_lvls))
    )
    parser = add_arg_if_in_arg_names('no_parallel', arg_names, parser,
        action='store_true',
        help=('Include this flag to process level 1 analysis runs '
              'sequentially. By default, the script will process the '
              'analyses in parallel simultaneously.')
    )
    parser = add_arg_if_in_arg_names('output', arg_names, parser, '-out',
        metavar='OUTPUT_DIRECTORY', type=valid_output_dir,  # required=True,
        help=('Directory path to save pipeline outputs into.' +
              msg_default.format(generic_output_dirpath))
    )
    # Specify the number of runs each subject has
    parser = add_arg_if_in_arg_names('runs', arg_names, parser,
        metavar='RUN', default=default_runs_lvls, type=valid_whole_number,
        nargs="+",
        help=('Each subject\'s number of runs. This argument must be 1 or '
              'more positive integers provided as a space-delimited list. '
              'For example: 1 2 3 4. By default, this argument\'s value(s) '
              'will be 1 2.')
    )
    parser = add_arg_if_in_arg_names(SCAN_ARG, arg_names, parser,
        type=valid_readable_file,
        help=('Path to existing .csv file listing all scanners\' parameters. '
              + msg_default.format('scan_info/{}.csv in the code directory.'
.format(SCAN_ARG))) ) # Which session to run the pipeline on parser = add_arg_if_in_arg_names('ses', arg_names, parser, metavar='SESSION', required=True, # default=default_ses, type=lambda x: valid_subj_ses(x, 'ses-', 'session'), #, 'ses'), help=msg_pipeline.format('session') ) # Desired spatial smoothing number parser = add_arg_if_in_arg_names('spat_smooth', arg_names, parser, metavar='DESIRED_SPATIAL_SMOOTHING', default=default_smooth, type=valid_whole_number, help=('Millimeters of spatial smoothing that you want for the surface ' 'and volume data.' + msg_whole_num + msg_default.format(default_smooth)) ) parser = add_arg_if_in_arg_names('subject', arg_names, parser, metavar='SUBJECT_ID', required=True, type=lambda x: valid_subj_ses(x, 'sub-', 'subject'), #, 'NDAR', 'INV'), help='ID of subject to process.' ) # Surface smoothing number parser = add_arg_if_in_arg_names('surf_smooth', arg_names, parser, metavar='CURRENT_SURFACE_SMOOTHING', default=default_smooth, type=valid_whole_number, help=''.join((msg_smooth.format('surface'), msg_whole_num, msg_default.format(default_smooth))) ) # Set file path for base directory and BIDS directory parser = add_arg_if_in_arg_names('study_dir', arg_names, parser, metavar='BIDS_BASE_STUDY_DIRECTORY', type=valid_readable_dir, required=True, help='Valid path to existing base study directory.' ) parser = add_arg_if_in_arg_names('study_name', arg_names, parser, metavar='STUDY_NAME', default=default_study_name, help=msg_pipeline.format('study') ) # Which task you are running the pipeline on parser = add_arg_if_in_arg_names('task', arg_names, parser, metavar='TASK_NAME', required=True, help=msg_pipeline.format('task') # + msg_choices(choices_tasks) ) parser = add_arg_if_in_arg_names('temp_dir', arg_names, parser, type=valid_readable_dir, metavar='TEMPORARY_DIRECTORY', help=('Valid path to existing directory to save temporary files into.') ) parser = add_arg_if_in_arg_names('templates', arg_names, parser, type=valid_readable_dir, help='Valid path to existing directory with template .fsf files.' ) for lvl in default_runs_lvls: # Specify the .fsf template files' names parser = add_arg_if_in_arg_names( 'template{}'.format(lvl), arg_names, parser, metavar='LEVEL_{}_TEMPLATE_NAME'.format(lvl), type=valid_template_filename, help=msg_template.format(lvl) ) # Volume smoothing number parser = add_arg_if_in_arg_names('vol_smooth', arg_names, parser, metavar='CURRENT_VOLUME_SMOOTHING', default=default_smooth, type=valid_whole_number, help=''.join((msg_smooth.format('volume'), msg_whole_num, msg_default.format(default_smooth))) ) # Specify path to wb_command parser = add_arg_if_in_arg_names('wb_command', arg_names, parser, default=default_wb_command, type=valid_readable_file, help=('Path to wb_command file to run Workbench Command. If this flag ' 'is excluded, then the script will try to guess the path to ' 'the wb_command file by checking the user\'s BASH aliases. ' 'Your default wb_command is "{}". If ' 'that says "None", then you need to include this argument.' 
.format(default_wb_command)) ) # Argument used to get this script's dir parser = add_arg_if_in_arg_names(WRAPPER_LOC, arg_names, parser, type=valid_readable_dir, required=True, help=('Valid path to existing ABCD-BIDS-task-fmri-pipeline directory ' 'that contains pipeline_wrapper.py') ) return parser def get_region_path_vars(cli_args, paths, run): """ Build and return paths to particular brain region images' files/dirs by filling in the unique parts of generic path strings :param cli_args: Dictionary containing all command-line arguments from user :param paths: Dictionary of path strings, and of dictionaries of path strings, used throughout processing in both levels :param run: Whole number (as an int or a string) defining which run this is :return: Tuple of string generic paths: design, func_str, subcort, surf_str """ # Paths to design file base and subcortical volume stats directory design = os.path.join(paths['lvl_1']['fsf'], get_sub_base(cli_args, run) + '_level1') subcort = os.path.join(paths['lvl_1']['parent'], 'SubcorticalVolumeStats') # Generic strings used as templates for paths later func_str = os.path.join(paths['lvl_1']['intermediate'], '{}{}_filtered.atlasroi{}.{}.32k_fs_LR.func.gii') surf_str = os.path.join(paths['sub_ses']['anat'], ( '{}_hemi-{}_space-MNI_mesh-fsLR32k_midthickness.surf.gii' )) return design, func_str, subcort, surf_str def get_replacements(cli_args, **kwargs): """ :param cli_args: Dictionary containing all command-line arguments from user :return: Dictionary mapping variables' generic names in template files to those variables' actual values provided by the user """ replacements = {'SUBID': cli_args['subject'], 'FEAT_NAME': cli_args['study_name'], # Not paths['feat_name'] 'FIN_SMOOTH': str(cli_args['spat_smooth']), 'HP_FILTER': str(cli_args['filter']), 'SESSION': cli_args['ses'], 'TASK': cli_args['task'], 'OUTPUT_DIR': cli_args['output'], 'EVENTS_DIR': cli_args['events_dir'], 'STUDY_DIR': cli_args['study_dir']} replacements.update(kwargs) return replacements def get_sbatch_args(cli_args, job): """ :param cli_args: Dictionary containing all command-line arguments from user :param job: String 1-8 characters long naming the SBATCH job :return: List of strings, SLURM-related arguments to pass to the main script or level 1 analysis script for parallelization """ return [argify('time', cli_args['time']), '-c', str(cli_args['cpus']), '-J', job, argify('mem', '{}gb'.format(cli_args["memory"]))] def get_sub_base(cli_args, run_num=None): """ :param cli_args: Dictionary containing all command-line arguments from user :param run_num: Whole number as an int or string defining which run this is :return: String identifying a subject, session, task, and maybe run """ parts = [get_subj_ses(cli_args), 'task-' + cli_args['task']] if run_num is not None: parts.append('run-{}'.format(run_num)) return '_'.join(parts) def get_subj_ses(cli_args): """ :param cli_args: Dictionary containing all command-line arguments from user :return: String which combines --subject and --ses from command line """ return '_'.join((cli_args['subject'], cli_args['ses'])) def get_TR_and_ntpts(dtseries_path, wb_command_path): """ :param dtseries_path: String, the full path to a .dtseries.nii file :param wb_command_path: String, the full path to the wb_command executable :return: Tuple of 2 numbers, the number of timepoints and repetition time """ if not os.path.exists(dtseries_path): sys.exit('Error: {} does not exist'.format(dtseries_path)) else: ntpts = wb_command_get_info(wb_command_path, 
                                       dtseries_path, 'number-of-maps')
        rep_time = wb_command_get_info(wb_command_path, dtseries_path,
                                       'step-interval')
    return rep_time, ntpts


def glob_and_copy(dest_dirpath, *path_parts_to_glob):
    """
    Collect all files matching a glob string, then copy those files
    :param dest_dirpath: String, a valid path of a directory to copy files into
    :param path_parts_to_glob: Unpacked list of strings which join to form a
                               glob string of a path to copy files from
    """
    for file_src in glob(os.path.join(*path_parts_to_glob)):
        shutil.copy(file_src, dest_dirpath)


def make_and_save_confound_matrix(cli_args, desc_tsv_file, lvl_paths,
                                  sub_run_basename):
    """
    Create the confound matrix and copy it to the subject's fsf_paths for
    each run
    :param cli_args: Dictionary containing all command-line arguments from user
    :param desc_tsv_file: String naming a .tsv file in intermediate_files/ dir
    :param lvl_paths: Dictionary mapping keys to dir path strings
    :param sub_run_basename: String naming the subject and the run number (?)
    :return: String, the base name of the confound matrix .csv file
    """
    # Local variables: File paths, step filename, adjusted variable to censor
    # initial frames based on user-specification, and result (confounds fname)
    in_file = os.path.join(lvl_paths['intermediate'], desc_tsv_file)

    def tsv_file_for_step(stepnum):
        return os.path.join(lvl_paths['intermediate'],
                            ('{0}_desc-filteredincludingFD_motion_step{1}.tsv'
                             .format(sub_run_basename, stepnum)))

    censor_volumes = list(range(0, cli_args['censor']))
    confounds_name = str(sub_run_basename + '_confound_matrix.tsv')

    # Read and write framewise displacement step1 .csv file
    df = pd.read_csv(in_file, sep='\s+')
    df.framewise_displacement.iloc[censor_volumes] = 1
    df.framewise_displacement[df.framewise_displacement < cli_args['fd']] = 0
    df.framewise_displacement[df.framewise_displacement > 0] = 1
    df.framewise_displacement.to_csv(tsv_file_for_step(1), header=False,
                                     encoding='utf-8', sep='\t', index=False)

    # Read and write step2 .csv file
    df = pd.read_csv(in_file, sep='\s+')
    cols = ['trans_x_mm', 'trans_y_mm', 'trans_z_mm', 'rot_x_degrees',
            'rot_y_degrees', 'rot_z_degrees', 'trans_x_mm_dt',
            'trans_y_mm_dt', 'trans_z_mm_dt', 'rot_x_degrees_dt',
            'rot_y_degrees_dt', 'rot_z_degrees_dt']
    df = df[cols]  # the 'cols' intermediate variable is needed to avoid error
    df.to_csv(tsv_file_for_step(2), sep='\t', encoding='utf-8', index=False,
              header=False)

    # Read and write step3 .csv file
    df = pd.read_csv(tsv_file_for_step(1), names=['A'], sep='\t')
    df = pd.concat([pd.get_dummies(df[df['A'] == 1].index)
                    .transpose(), df], axis=1).fillna(0)
    del df['A']
    df.to_csv(tsv_file_for_step(3), sep='\t', encoding='utf-8', index=False,
              header=False)

    # Read and write confound matrix .csv file; return its name
    pd.concat([pd.read_csv(tsv_file_for_step(x), header=None, sep='\t')
               for x in (2, 3)], axis=1).to_csv(
        os.path.join(lvl_paths['fsf'], confounds_name), sep='\t',
        encoding='utf-8', header=None, index=False
    )
    return confounds_name


def individualize_subprocess_run(run_args, run, to_replace):
    """
    Cycle through every argument in run_args and replace instances of the
    to_replace string with run, then return the arguments.
:param run_args: List of strings, all arguments to call via subprocess :param run: Whole number (as an int or a string) defining which run this is :param to_replace: String to find and replace with each run name/id :return: run_args, but with to_replace replaced by run in them all """ for i in range(len(run_args)): run_args[i] = str(run_args[i]).replace(to_replace, str(run)) return run_args def make_fake_nifti(cli_args, generic, old_smoothed, unique_part, cmd, *args): """ Create a fake nifti from the smoothed dtseries for high-pass filtering :param cli_args: Dictionary containing all command-line arguments from user :param generic: String, new smoothed nifti file path but with a '{}' in it :param old_smoothed: String, the path to a real old smoothed nifti file :param unique_part: String/etc inserted into generic to make a valid path :param cmd: String which is a Bash command but with '{}'s in it to replace :return: String, the valid path to the now-real new smoothed nifti file """ started = datetime.now() new_smoothed = generic.format(unique_part) cmd_args = cmd.format(old_smoothed, *args, new_smoothed).split() if cmd_args[0] == 'wb_command': wb_command(cli_args, *cmd_args[1:]) else: # if cmd_args[0] in ('fsl', 'feat_model', 'film_gls', ): run_fsl_tool(cli_args, *cmd_args) if cli_args['print_progress']: get_and_print_time_since('started making ' + os.path.basename(new_smoothed), started) return new_smoothed def merge_files_in_range(cli_args, file_names, range_to, args): """ :param cli_args: Dictionary containing all command-line arguments from user :param file_names: List of strings where each is a filename :param range_to: Integer, the number of files to merge :param args: List, the rest of the arguments to call merge_to_make_dtseries """ for r in range(0, range_to): for f in file_names: merge_to_make_dtseries(cli_args, str(f) + str(r + 1), *args) def merge_to_make_dtseries(cli_args, fname, lvl_paths, substats, AROI2, shape): """ :param fname: String, base name of the files to merge into a dtseries :param lvl_paths: Dictionary mapping keys to dir path strings :param substats: String, the path to the subcortical stats directory :param AROI2: String, path to Atlas ROI file :param shape: Function takes 'L' or 'R' & returns path to shape.gii file """ cii_out = os.path.join(lvl_paths['GOStats'], fname + '.dtseries.nii') subcort_in = os.path.join(substats, fname + '.nii.gz') func = lambda x: os.path.join(lvl_paths['parent'], x + '_SurfaceStats', fname + '.func.gii') fake_nifti = os.path.join(lvl_paths['GOStats'], fname + '.nii.gz') wb_command(cli_args, '-cifti-create-dense-timeseries', cii_out, '-volume', subcort_in, AROI2, '-left-metric', func('L'), '-roi-left', shape('L'), '-right-metric', func('R'), '-roi-right', shape('R')) wb_command(cli_args, '-cifti-convert', '-to-nifti', cii_out, fake_nifti) def organize_lvl_paths(lvl_paths, *keys_to_remove): """ :param lvl_paths: Dictionary mapping keys to dir path strings :param keys_to_remove: Unpacked list of strings which are lvl_paths keys to exclude from the return list :return: List of all values in lvl_paths (except the ones mapped to keys_to_remove), sorted alphabetically """ lvl_paths = lvl_paths.copy() for each_key in keys_to_remove: lvl_paths.pop(each_key) to_return = list(lvl_paths.values()) to_return.sort(reverse=False) return to_return def overwrite_dirs(dirs_to_overwrite, mkdir=False): """ :param dirs_to_overwrite: List of strings which are paths to directories to create or overwrite with empty directories :param mkdir: True to remake 
all the dirs after overwrite, else False
    """
    for each_dir in dirs_to_overwrite:
        if os.path.isdir(each_dir):
            shutil.rmtree(each_dir)
        elif os.path.exists(each_dir):
            os.remove(each_dir)
        if mkdir:
            os.makedirs(each_dir)


def rand_string(L):
    """
    :param L: Integer, length of the string to randomly generate
    :return: String (of the given length L) of random characters
    """
    return ''.join(random.choices(string.ascii_lowercase + string.digits, k=L))


def rename_template_file_vars(old_template, new_template, replacements):
    """
    :param old_template: String, path to existing template file
    :param new_template: String, path to new template file which will be
                         written with old_template variables but renamed
    :param replacements: Dictionary mapping each string in old_template to the
                         string to replace it with in new_template
    """
    with open(old_template) as infile:  # Open the level 1 or 2 template

        # Create new .fsf file; name the output "sub-*_ses-*_task-*_level*.fsf"
        with open(new_template, 'w') as outfile:
            for line in infile:

                # Use replacements dict to replace variables in the .fsf file
                for src, target in replacements.items():
                    line = line.replace(src, target)

                # Output the new subject-, (run-,) and task-specific .fsf file
                outfile.write(line)


def run_fsl_tool(cli_args, toolname, *args):
    """
    :param cli_args: Dictionary containing all command-line arguments from user
    :param toolname: String naming the executable tool in --fsl-dir to run
    :param args: Unpacked list of arguments to run toolname with
    """
    subprocess.check_call([
        valid_readable_file(os.path.join(cli_args['fsl_dir'], toolname)), *args
    ])


def run_parallel_or_sequential(script_path, cli_args, runs, to_replace,
                               extra_args, second_fn=None, second_args=None):
    """
    Run a Python script via subprocess, either sequentially or in parallel
    depending on cli_args --no-parallel
    :param script_path: String, valid path to real script to run in parallel
    :param cli_args: Dictionary containing all command-line arguments from user
    :param runs: List of unique strings identifying differences between scripts
    :param to_replace: String to find and replace with each job name/id
    :param extra_args: List of extra arguments to pass to every subscript run
    :param second_fn: Optional function to run as one additional process
    :param second_args: Optional list of arguments to call second_fn with
    """
    if cli_args['no_parallel']:  # Run processes serially/sequentially
        for run in runs:
            run_python_subscript(script_path, run, to_replace, *extra_args)

    else:  # Run processes in parallel using Python multiprocessing module
        to_run = list()
        all_args = list()
        for run in runs:
            all_args.append([script_path, run, to_replace, *extra_args])
            to_run.append(mp.Process(args=all_args[-1], name=all_args[-1][0],
                                     target=run_python_subscript))
        if second_fn and second_args:
            all_args.append(second_args)
            to_run.append(mp.Process(target=second_fn, args=second_args,
                                     name=second_args[0]))
        if dict_has(cli_args, 'print_progress'):
            print('Running parallel:\n' + '\n\n'.join(str(x) for x in all_args))
        try:
            run_parallel(os.path.basename(script_path), to_run,
                         cli_args['sleep'], cli_args['print_progress'])
        except Exception as e:
            sys.exit(e)


def run_parallel(scriptname, processes, sleep, show):
    """
    Run a script multiple times in parallel
    :param scriptname: String describing the script being run in parallel
    :param processes: List of multiprocessing.Process objects ready to run
    :param sleep: Integer, how many seconds to wait between (a) process
                  submissions and (b) checking if all processes finished
    :param show: True to show the user what's running at sleep-second
                 intervals; otherwise False
    """
    started = datetime.now()
    failed = False
    for each_process in processes:
        each_process.start()  # Process.start() returns None, so there is
        time.sleep(sleep)     # nothing useful to collect from it
    while any((p.exitcode is None) for p in processes):
        time.sleep(sleep)
        if show:
            get_and_print_time_since(scriptname + ' started', started)
        if not all(p.exitcode is None or p.exitcode == 0 for p in processes):
            failed = True
            for p in processes:
                p.terminate()
    if failed:
        sys.exit('Error: {} subprocess failed.'.format(scriptname))


def run_python_subscript(path_to_subscript, run, to_replace, *args):
    """
    Use subprocess to run a Python 3.6+ script from this code base
    :param path_to_subscript: String, valid path to real Python 3.6+ script
    :param run: Whole number (as an int or a string) defining which run this is
    :param to_replace: String to find and replace with each run name/id
    :param args: Unpacked list of parameters to run subscript with
    """
    start_time = datetime.now()
    try:
        subprocess.check_call(individualize_subprocess_run(
            ['python3', path_to_subscript, *args], run, to_replace
        ))
    except subprocess.CalledProcessError:
        # TODO make this into a reusable function?
        # See run_level_1_analysis.get_events_make_template
        err_type, err_msg, _ = sys.exc_info()
        sys.exit('\n\n{}: {}\n\n'.format(err_type.__name__, err_msg))
    get_and_print_time_since(os.path.basename(path_to_subscript) + ' started',
                             start_time)
    return  # Explicitly end this function so multiprocessing knows it's done


def save_to_json_and_get_path(a_dict, dict_name, out_dir):
    """
    :param a_dict: Dictionary with only string keys
    :param dict_name: String naming a_dict
    :param out_dir: String, a valid path to a real directory to save
                    the .json file containing a_dict into
    :return: String, the full path to the .json file containing a_dict
    """
    json_path = os.path.join(out_dir, 'abcd-bids-pipeline-{}_{}.json'.format(
        dict_name, datetime.now().strftime('%Y-%b-%d_%H-%M')
    ))
    with open(json_path, 'w+') as json_file:
        json_file.write(json.dumps(a_dict))
    return json_path


def valid_float_0_to_1(val):
    """
    :param val: Object to check, then throw an error if it is invalid
    :return: val if it is a float between 0 and 1 (otherwise invalid)
    """
    return validate(val, lambda x: 0 <= float(x) <= 1, float,
                    'Value must be a number between 0 and 1')


def valid_output_dir(path):
    """
    Try to make a folder for new files at path; throw exception if that fails
    :param path: String which is a valid (not necessarily real) folder path
    :return: String which is a validated absolute path to real writeable folder
    """
    return validate(path, lambda x: os.access(x, os.W_OK),
                    valid_readable_dir, 'Cannot create directory at {}',
                    lambda y: os.makedirs(y, exist_ok=True))


def valid_readable_dir(path):
    """
    :param path: Parameter to check if it represents a valid directory path
    :return: String representing a valid directory path
    """
    return validate(path, os.path.isdir, valid_readable_file,
                    'Cannot read directory at {}')


def valid_readable_file(path):
    """
    Throw exception unless parameter is a valid readable filepath string. Use
    this, not argparse.FileType('r') which leaves an open file handle.
    :param path: Parameter to check if it represents a valid filepath
    :return: String representing a valid filepath
    """
    return validate(path, lambda x: os.access(x, os.R_OK), os.path.abspath,
                    'Cannot read file at {}')


def valid_readable_json(path):
    """
    :param path: Parameter to check if it represents a valid .json file path
    :return: String representing a valid .json file path
    """
    return validate(path, lambda x: os.path.splitext(x)[-1] == '.json',
                    valid_readable_file, '{} is not a readable .json filepath')


def valid_subj_ses(in_arg, prefix, name):  # , *keywords):
    """
    :param in_arg: Object to check if it is a valid subject ID or session name
    :param prefix: String, 'sub-' or 'ses-'
    :param name: String describing what in_arg should be (e.g. 'subject')
    :return: in_arg, prepended with prefix unless it already started with it
    """
    return validate(in_arg, lambda _: True,
                    # lambda x: any([key in x for key in [prefix, *keywords]]),
                    lambda y: (y if y[:len(prefix)] == prefix else prefix + y),
                    '{}' + ' is not a valid {}'.format(name))


def valid_template_filename(fname):
    """
    :param fname: Parameter to check if it represents a .fsf file name
    :return: String representing the .fsf file name
    """
    return validate(fname, lambda x: os.path.splitext(x)[-1] == '.fsf',
                    lambda y: y, '{} is not an .fsf file name')


def valid_time_str(in_arg):
    """
    :param in_arg: Object to check if it's a time string in the HH:MM:SS format
    :return: in_arg, if it is a time limit string in that format
    """
    try:
        split = in_arg.split(":")
        assert len(split) == 3
        for each_num in split:
            assert each_num.isdigit()
            assert int(each_num) >= 0
        return in_arg
    except (TypeError, AssertionError, ValueError):
        raise argparse.ArgumentTypeError('Invalid time string.')


def valid_whole_number(to_validate):
    """
    Throw argparse exception unless to_validate is a non-negative integer
    (the check is int(x) >= 0, so zero is accepted)
    :param to_validate: Object to test whether it is a non-negative integer
    :return: to_validate if it is a non-negative integer
    """
    return validate(to_validate, lambda x: int(x) >= 0, int,
                    '{} is not a non-negative integer')


def validate(to_validate, is_real, make_valid, err_msg, prepare=None):
    """
    Parent/base function used by different type validation functions. Raises
    an argparse.ArgumentTypeError if the input object is somehow invalid.
    :param to_validate: String to check if it represents a valid object
    :param is_real: Function which returns true iff to_validate is real
    :param make_valid: Function which returns a fully validated object
    :param err_msg: String to show to user to tell them what is invalid
    :param prepare: Function to run before validation
    :return: to_validate, but fully validated
    """
    try:
        if prepare:
            prepare(to_validate)
        assert is_real(to_validate)
        return make_valid(to_validate)
    except (OSError, TypeError, AssertionError, ValueError,
            argparse.ArgumentTypeError):
        raise argparse.ArgumentTypeError(err_msg.format(to_validate))


def validate_cli_args(cli_args, parser, arg_names=set()):
    """
    Validate types and set defaults for any arg whose validation depends on
    another arg and therefore was not possible in get_pipeline_cli_argparser
    :param cli_args: Dictionary containing all command-line arguments from user
    :param parser: argparse.ArgumentParser to raise error if anything's invalid
    :param arg_names: Set containing SCAN_ARG if that argument is needed
    :return: cli_args, but fully validated
    """
    # Default levels, template file directory, and scanner info file path
    cli_args = ensure_dict_has(cli_args, 'levels',
                               [1, 2] if len(cli_args['runs']) > 1 else [1])
    cli_args = ensure_dict_has(cli_args, 'templates',
                               os.path.join(SCRIPT_DIR, 'templates'))
    if SCAN_ARG in arg_names:
        cli_args = ensure_dict_has(cli_args, SCAN_ARG, os.path.join(
            SCRIPT_DIR, 'scan_info', SCAN_ARG + '.csv'
        ))

    for lvl in cli_args['levels']:  # Default template file names
        cli_args = ensure_dict_has(cli_args, 'template{}'.format(lvl), (
            'template_DCAN_version_{}_level{}_UPDATED_FINAL.fsf'
            .format(cli_args['task'], lvl)
        ))
        validate_template_file(cli_args, lvl, parser)

    # Default paths to FSL and wb_command
    ERR_MSG = 'No {} found. Please include the {} argument.'
    if not (dict_has(cli_args, 'wb_command')
            and os.access(cli_args['wb_command'], os.X_OK)):
        parser.error(ERR_MSG.format('wb_command executable', '--wb-command'))
    if not dict_has(cli_args, 'fsl_dir'):
        fsl = get_default_ext_command('fsl')
        cli_args['fsl_dir'] = os.path.dirname(fsl) if fsl else parser.error(
            ERR_MSG.format('FSL directory', '--fsl-dir')
        )

    # Default output/temp/event files directories. ensure_dict_has is avoided
    # here because valid_output_dir makes the directories, which could raise
    # a permissions error
    if not dict_has(cli_args, 'output'):
        cli_args['output'] = valid_output_dir(
            os.path.join(cli_args['study_dir'], 'derivatives',
                         'abcd-bids-tfmri-pipeline', cli_args['subject'],
                         cli_args['ses'])
        )
    for arg in ('temp_dir', 'events_dir'):
        if not dict_has(cli_args, arg):
            cli_args[arg] = valid_output_dir(
                os.path.join(cli_args['output'], 'level-1', arg.split('_')[0])
            )
    return cli_args


def validate_template_file(cli_args, lvl, parser):
    """
    Verify that template .fsf file exists
    :param cli_args: Dictionary containing all command-line arguments from user
    :param lvl: String or int defining the analysis level, 1 or 2 or "1" or "2"
    :param parser: argparse.ArgumentParser to raise error if anything's invalid
    """
    tmpl = 'template{}'.format(lvl)
    tmpl_fpath = os.path.join(cli_args['templates'], cli_args[tmpl])
    if not os.path.exists(tmpl_fpath):
        parser.error('{} does not exist. 
Please re-run with a different --{} '
                     'or --templates argument.'.format(tmpl_fpath, tmpl))


def wb_command(cli_args, *args):
    """
    Call wb_command executable with any given parameters
    :param cli_args: Dictionary mapping 'wb_command' key to wb_command filepath
    :param args: List of all parameters to call wb_command with, in order
    """
    subprocess.check_call([cli_args['wb_command'], *args])


def wb_command_get_info(wb_command, dtseries, arg_only):
    """
    Call wb_command with -file-information and -no-map-info parameters
    :param wb_command: String, path to existing workbench wb_command executable
                       (note: this parameter shadows the wb_command function)
    :param dtseries: String, the path to a .dtseries.nii file with file info
    :param arg_only: String, the last part of the name of a wb_command argument
                     starting with '-only-'
    :return: String representing a numerical value describing the dtseries
    """
    # os.popen does not check the exit code; the caller gets whatever text
    # the command printed to stdout
    return os.popen('{} -file-information {} -no-map-info -only-{}'
                    .format(wb_command, dtseries, arg_only)).read().rstrip()


def wb_LR_pair(func_LR, arg_LR=None, after=True):
    """
    Get wb_command left- and right- arguments
    :param func_LR: Function which accepts 'L' or 'R' and returns a filepath
    :param arg_LR: String naming the left- or right- argument
    :param after: True if arg_LR goes after the word 'left'/'right'; else False
    :return: List with 4 elements, arg name and then value for left then right
    """
    arg_LR = '-' + arg_LR if arg_LR else ''
    arg_fmt = ('-{}' + arg_LR) if after else (arg_LR + '-{}')
    return [arg_fmt.format('left'), func_LR('L'),
            arg_fmt.format('right'), func_LR('R')]
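# --- Example (not part of the pipeline) -------------------------------------
# A minimal, self-contained sketch of the step3 spike-regressor trick from
# make_and_save_confound_matrix above: pd.get_dummies on the indices of the
# FD-flagged frames, transposed and re-aligned, yields one censoring column
# per flagged volume. The toy FD flags below are made up for illustration.
import pandas as pd

fd = pd.DataFrame({'A': [0, 1, 0, 0, 1, 0]})  # frames 1 and 4 are flagged
spikes = pd.get_dummies(fd[fd['A'] == 1].index).transpose()
out = pd.concat([spikes, fd], axis=1).fillna(0).astype(int)
del out['A']
print(out)  # column 0 is 1 only at frame 1; column 1 is 1 only at frame 4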
context.py
#!/usr/bin/env python3

from http import HTTPStatus
from urllib.parse import urlparse
# import socketserver
import threading
import http.server
import json
import queue
import socket
import subprocess
import time
import uuid
import string
import random
import yaml
import requests
import websocket
from sqlalchemy import create_engine
from sqlalchemy.schema import MetaData
import graphql_server
import graphql


class HGECtxError(Exception):
    pass


class GQLWsClient():

    def __init__(self, hge_ctx, endpoint):
        self.hge_ctx = hge_ctx
        self.ws_queue = queue.Queue(maxsize=-1)
        self.ws_url = urlparse(hge_ctx.hge_url)._replace(scheme='ws',
                                                         path=endpoint)
        self.create_conn()

    def create_conn(self):
        self.ws_queue.queue.clear()
        self.ws_id_query_queues = dict()
        self.ws_active_query_ids = set()
        # Initialize connection state before the socket thread starts, so the
        # callbacks never race an uninitialized flag
        self.remote_closed = False
        self.connected = False
        self.init_done = False
        # Register on_open as well, so self.connected flips to True as soon
        # as the socket opens (otherwise _on_open is never called)
        self._ws = websocket.WebSocketApp(self.ws_url.geturl(),
                                          on_open=self._on_open,
                                          on_message=self._on_message,
                                          on_close=self._on_close)
        self.wst = threading.Thread(target=self._ws.run_forever)
        self.wst.daemon = True
        self.wst.start()

    def recreate_conn(self):
        self.teardown()
        self.create_conn()

    def get_ws_event(self, timeout):
        return self.ws_queue.get(timeout=timeout)

    def has_ws_query_events(self, query_id):
        return not self.ws_id_query_queues[query_id].empty()

    def get_ws_query_event(self, query_id, timeout):
        return self.ws_id_query_queues[query_id].get(timeout=timeout)

    def send(self, frame):
        if not self.connected:
            self.recreate_conn()
            time.sleep(1)
        if frame.get('type') == 'stop':
            self.ws_active_query_ids.discard(frame.get('id'))
        elif frame.get('type') == 'start' and 'id' in frame:
            self.ws_id_query_queues[frame['id']] = queue.Queue(maxsize=-1)
        self._ws.send(json.dumps(frame))

    def init_as_admin(self):
        headers = {}
        if self.hge_ctx.hge_key:
            headers = {'x-hasura-admin-secret': self.hge_ctx.hge_key}
        self.init(headers)

    def init(self, headers={}):
        payload = {'type': 'connection_init', 'payload': {}}
        if headers and len(headers) > 0:
            payload['payload']['headers'] = headers
        self.send(payload)
        ev = self.get_ws_event(3)
        assert ev['type'] == 'connection_ack', ev
        self.init_done = True

    def stop(self, query_id):
        data = {'id': query_id, 'type': 'stop'}
        self.send(data)
        self.ws_active_query_ids.discard(query_id)

    def gen_id(self, size=6, chars=string.ascii_letters + string.digits):
        new_id = ''.join(random.choice(chars) for _ in range(size))
        if new_id in self.ws_active_query_ids:
            return self.gen_id(size, chars)
        return new_id

    def send_query(self, query, query_id=None, headers={}, timeout=60):
        graphql.parse(query['query'])
        if headers and len(headers) > 0:
            # Do init if headers are provided
            self.init(headers)
        elif not self.init_done:
            self.init()
        if query_id is None:
            query_id = self.gen_id()
        frame = {
            'id': query_id,
            'type': 'start',
            'payload': query,
        }
        self.ws_active_query_ids.add(query_id)
        self.send(frame)
        while True:
            yield self.get_ws_query_event(query_id, timeout)

    def _on_open(self):
        self.connected = True

    def _on_message(self, message):
        json_msg = json.loads(message)
        if 'id' in json_msg:
            query_id = json_msg['id']
            if json_msg.get('type') == 'stop':
                # Remove from active queries list
                self.ws_active_query_ids.discard(query_id)
            if query_id not in self.ws_id_query_queues:
                self.ws_id_query_queues[query_id] = queue.Queue(maxsize=-1)
            # Put event in the corresponding query queue
            self.ws_id_query_queues[query_id].put(json_msg)
        elif json_msg['type'] == 'ka':
            self.connected = True
        else:
            # Put event in the main queue
            self.ws_queue.put(json_msg)

    def _on_close(self):
        self.remote_closed = True
        self.connected = False
        self.init_done = False

    def teardown(self):
        if not self.remote_closed:
            self._ws.close()
        self.wst.join()


class EvtsWebhookHandler(http.server.BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(HTTPStatus.OK)
        self.end_headers()

    def do_POST(self):
        content_len = self.headers.get('Content-Length')
        req_body = self.rfile.read(int(content_len)).decode("utf-8")
        req_json = json.loads(req_body)
        req_headers = self.headers
        req_path = self.path
        self.log_message(json.dumps(req_json))
        if req_path == "/fail":
            self.send_response(HTTPStatus.INTERNAL_SERVER_ERROR)
            self.end_headers()
            self.server.error_queue.put({"path": req_path,
                                         "body": req_json,
                                         "headers": req_headers})
        elif req_path == "/timeout_short":
            time.sleep(5)
            self.send_response(HTTPStatus.NO_CONTENT)
            self.end_headers()
            self.server.error_queue.put({"path": req_path,
                                         "body": req_json,
                                         "headers": req_headers})
        elif req_path == "/timeout_long":
            time.sleep(5)
            self.send_response(HTTPStatus.NO_CONTENT)
            self.end_headers()
            self.server.resp_queue.put({"path": req_path,
                                        "body": req_json,
                                        "headers": req_headers})
        else:
            self.send_response(HTTPStatus.NO_CONTENT)
            self.end_headers()
            self.server.resp_queue.put({"path": req_path,
                                        "body": req_json,
                                        "headers": req_headers})


class EvtsWebhookServer(http.server.HTTPServer):
    def __init__(self, server_address):
        self.resp_queue = queue.Queue(maxsize=1)
        self.error_queue = queue.Queue()
        super().__init__(server_address, EvtsWebhookHandler)

    def server_bind(self):
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)

    def get_event(self, timeout):
        return self.resp_queue.get(timeout=timeout)

    def get_error_queue_size(self):
        # NOTE: counting drains error_queue as a side effect
        sz = 0
        while not self.error_queue.empty():
            self.error_queue.get()
            sz = sz + 1
        return sz

    def teardown(self):
        # Shut down this HTTP server itself; the original body referenced
        # attributes (evt_trggr_httpd, graphql_server, gql_srvr_thread,
        # evt_trggr_web_server) that do not exist on this class
        self.shutdown()
        self.server_close()


class HGECtxGQLServer:
    def __init__(self):
        # start the graphql server
        self.graphql_server = graphql_server.create_server('127.0.0.1', 5000)
        self.gql_srvr_thread = threading.Thread(
            target=self.graphql_server.serve_forever)
        self.gql_srvr_thread.start()

    def teardown(self):
        graphql_server.stop_server(self.graphql_server)
        self.gql_srvr_thread.join()


class HGECtx:

    def __init__(self, hge_url, pg_url, hge_key, hge_webhook, webhook_insecure,
                 hge_jwt_key_file, hge_jwt_conf, metadata_disabled,
                 ws_read_cookie, hge_scale_url):

        self.http = requests.Session()
        self.hge_key = hge_key
        self.hge_url = hge_url
        self.pg_url = pg_url
        self.hge_webhook = hge_webhook
        if hge_jwt_key_file is None:
            self.hge_jwt_key = None
        else:
            with open(hge_jwt_key_file) as f:
                self.hge_jwt_key = f.read()
        self.hge_jwt_conf = hge_jwt_conf
        self.webhook_insecure = webhook_insecure
        self.metadata_disabled = metadata_disabled
        self.may_skip_test_teardown = False

        self.engine = create_engine(self.pg_url)
        self.meta = MetaData()

        self.ws_read_cookie = ws_read_cookie
        self.hge_scale_url = hge_scale_url

        self.ws_client = GQLWsClient(self, '/v1/graphql')

        result = subprocess.run(['../../scripts/get-version.sh'], shell=False,
                                stdout=subprocess.PIPE, check=True)
        self.version = result.stdout.decode('utf-8').strip()
        if not self.metadata_disabled:
            try:
                st_code, resp = self.v1q_f('queries/clear_db.yaml')
            except requests.exceptions.RequestException as e:
                self.teardown()
                raise HGECtxError(repr(e))
            assert st_code == 200, resp

    def reflect_tables(self):
        self.meta.reflect(bind=self.engine)

    def anyq(self, u, q, h):
        resp = self.http.post(
            self.hge_url
            + u,
            json=q,
            headers=h
        )
        return resp.status_code, resp.json()

    def sql(self, q):
        conn = self.engine.connect()
        res = conn.execute(q)
        conn.close()
        return res

    def v1q(self, q, headers={}):
        h = headers.copy()
        if self.hge_key is not None:
            h['X-Hasura-Admin-Secret'] = self.hge_key
        resp = self.http.post(
            self.hge_url + "/v1/query",
            json=q,
            headers=h
        )
        return resp.status_code, resp.json()

    def v1q_f(self, fn):
        with open(fn) as f:
            return self.v1q(yaml.safe_load(f))

    def teardown(self):
        self.http.close()
        self.engine.dispose()
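# --- Example (assumed usage, not from the test suite) ------------------------
# How a test might drive GQLWsClient above: init the connection, start a
# subscription, read one frame, then stop it. The subscription text and
# query id are illustrative only.
def example_subscription(hge_ctx):
    ws_client = GQLWsClient(hge_ctx, '/v1/graphql')
    ws_client.init_as_admin()           # connection_init handshake
    query = {'query': 'subscription { test { id } }'}
    events = ws_client.send_query(query, query_id='q1', timeout=10)
    ev = next(events)                   # first frame for this query id
    assert ev['type'] in ('data', 'error'), ev
    ws_client.stop('q1')                # send the stop frame
    ws_client.teardown()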
updator.py
"""Has classes that help updating Prompt sections using Threads.""" import builtins import concurrent.futures import threading import typing as tp from prompt_toolkit import PromptSession from prompt_toolkit.formatted_text import PygmentsTokens from xonsh2.prompt.base import ParsedTokens from xonsh2.style_tools import partial_color_tokenize, style_as_faded class Executor: """Caches thread results across prompts.""" def __init__(self): self.thread_pool = concurrent.futures.ThreadPoolExecutor( max_workers=builtins.__xonsh__.env["ASYNC_PROMPT_THREAD_WORKERS"] ) # the attribute, .cache is cleared between calls. # This caches results from callback alone by field name. self.thread_results = {} def submit(self, func: tp.Callable, field: str): future = self.thread_pool.submit(self._run_func, func, field) place_holder = "{" + field + "}" return ( future, ( self.thread_results[field] if field in self.thread_results else place_holder ), place_holder, ) def _run_func(self, func, field): """Run the callback and store the result.""" result = func() self.thread_results[field] = ( result if result is None else style_as_faded(result) ) return result class AsyncPrompt: """Represent an asynchronous prompt.""" def __init__(self, name: str, session: PromptSession, executor: Executor): """ Parameters ---------- name: str what prompt to update. One of ['message', 'rprompt', 'bottom_toolbar'] session: PromptSession current ptk session """ self.name = name # list of tokens in that prompt. It could either be resolved or not resolved. self.tokens: tp.Optional[ParsedTokens] = None self.timer = None self.session = session self.executor = executor # (Key: the future object) that is created for the (value: index/field_name) in the tokens list self.futures: tp.Dict[ concurrent.futures.Future, tp.Tuple[str, tp.Optional[int], tp.Optional[str], tp.Optional[str]], ] = {} def start_update(self, on_complete): """Listen on futures and update the prompt as each one completed. Timer is used to avoid clogging multiple calls at the same time. Parameters ----------- on_complete: callback to notify after all the futures are completed """ if not self.tokens: print(f"Warn: AsyncPrompt is created without tokens - {self.name}") return for fut in concurrent.futures.as_completed(self.futures): val = fut.result() if fut not in self.futures: # rare case where the future is completed but the container is already cleared # because new prompt is called continue placeholder, idx, spec, conv = self.futures[fut] # example: placeholder="{field}", idx=10, spec="env: {}" if isinstance(idx, int): self.tokens.update(idx, val, spec, conv) else: # when the function is called outside shell. for idx, ptok in enumerate(self.tokens.tokens): if placeholder in ptok.value: val = ptok.value.replace(placeholder, val) self.tokens.update(idx, val, spec, conv) # calling invalidate in less period is inefficient self.invalidate() on_complete(self.name) def invalidate(self): """Create a timer to update the prompt. The timing can be configured through env variables. threading.Timer is used to stop calling invalidate frequently. 
""" from xonsh2.ptk_shell.shell import tokenize_ansi if self.timer: self.timer.cancel() def _invalidate(): new_prompt = self.tokens.process() formatted_tokens = tokenize_ansi( PygmentsTokens(partial_color_tokenize(new_prompt)) ) setattr(self.session, self.name, formatted_tokens) self.session.app.invalidate() self.timer = threading.Timer( builtins.__xonsh__.env["ASYNC_INVALIDATE_INTERVAL"], _invalidate ) self.timer.start() def stop(self): """Stop any running threads""" for fut in self.futures: fut.cancel() self.futures.clear() def submit_section( self, func: tp.Callable, field: str, idx: tp.Optional[int] = None, spec: tp.Optional[str] = None, conv=None, ): future, intermediate_value, placeholder = self.executor.submit(func, field) self.futures[future] = (placeholder, idx, spec, conv) return intermediate_value class PromptUpdator: """Handle updating multiple AsyncPrompt instances prompt/rprompt/bottom_toolbar""" def __init__(self, session: PromptSession): self.prompts: tp.Dict[str, AsyncPrompt] = {} self.prompter = session self.executor = Executor() def add(self, prompt_name: tp.Optional[str]) -> tp.Optional[AsyncPrompt]: # clear out old futures from the same prompt if prompt_name is None: return None if prompt_name in self.prompts: self.stop(prompt_name) self.prompts[prompt_name] = AsyncPrompt( prompt_name, self.prompter, self.executor ) return self.prompts[prompt_name] def start(self): """after ptk prompt is created, update it in background.""" threads = [ threading.Thread(target=prompt.start_update, args=[self.on_complete]) for pt_name, prompt in self.prompts.items() ] for th in threads: th.start() def stop(self, prompt_name: str): if prompt_name in self.prompts: self.prompts[prompt_name].stop() def on_complete(self, prompt_name): self.prompts.pop(prompt_name, None)