Dataset schema (one record per row, fields in this order):
  repo_name - string, length 6 to 100
  path      - string, length 4 to 294
  copies    - string, 981 distinct values
  size      - string, length 4 to 6
  content   - string, length 606 to 896k
  license   - string, 15 distinct values
repo_name: dpiers/coderang-meteor | path: public/jsrepl/extern/python/unclosured/lib/python2.7/glob.py | copies: 173 | size: 2249
"""Filename globbing utility.""" import sys import os import re import fnmatch __all__ = ["glob", "iglob"] def glob(pathname): """Return a list of paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. """ return list(iglob(pathname)) def iglob(pathname): """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. """ if not has_magic(pathname): if os.path.lexists(pathname): yield pathname return dirname, basename = os.path.split(pathname) if not dirname: for name in glob1(os.curdir, basename): yield name return if has_magic(dirname): dirs = iglob(dirname) else: dirs = [dirname] if has_magic(basename): glob_in_dir = glob1 else: glob_in_dir = glob0 for dirname in dirs: for name in glob_in_dir(dirname, basename): yield os.path.join(dirname, name) # These 2 helper functions non-recursively glob inside a literal directory. # They return a list of basenames. `glob1` accepts a pattern while `glob0` # takes a literal basename (so it only has to check for its existence). def glob1(dirname, pattern): if not dirname: dirname = os.curdir if isinstance(pattern, unicode) and not isinstance(dirname, unicode): dirname = unicode(dirname, sys.getfilesystemencoding() or sys.getdefaultencoding()) try: names = os.listdir(dirname) except os.error: return [] if pattern[0] != '.': names = filter(lambda x: x[0] != '.', names) return fnmatch.filter(names, pattern) def glob0(dirname, basename): if basename == '': # `os.path.split()` returns an empty basename for paths ending with a # directory separator. 'q*x/' should match only directories. if os.path.isdir(dirname): return [basename] else: if os.path.lexists(os.path.join(dirname, basename)): return [basename] return [] magic_check = re.compile('[*?[]') def has_magic(s): return magic_check.search(s) is not None
license: mit
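A minimal usage sketch for the module above (the pattern strings are illustrative):

    # Hedged example: exercising glob()/iglob() from the module above.
    from glob import glob, iglob

    print(glob('*.py'))                    # eager: list of matching paths
    for path in iglob('src/*/[td]*.txt'):  # lazy: one path at a time
        print(path)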
repo_name: hikelee/launcher | path: launcher/templatetags/helpers.py | copies: 1 | size: 6201
""" sentry.templatetags.sentry_helpers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import functools import os.path from collections import namedtuple from datetime import timedelta import pytz import six from django import template from django.conf import settings from django.template.defaultfilters import stringfilter from django.utils import timezone from django.utils.html import escape from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from six.moves import range from six.moves.urllib.parse import quote from launcher.utils.strings import soft_break as _soft_break,soft_hyphenate,to_unicode,truncatechars SentryVersion=namedtuple('SentryVersion',[ 'current', 'latest', 'update_available', 'build', ]) register=template.Library() truncatechars=register.filter(stringfilter(truncatechars)) truncatechars.is_safe=True @register.filter def multiply(x,y): def coerce(value): if isinstance(value,(six.integer_types,float)): return value try: return int(value) except ValueError: return float(value) return coerce(x)*coerce(y) @register.filter def pprint(value,break_after=10): """ break_after is used to define how often a <span> is inserted (for soft wrapping). """ value=to_unicode(value) return mark_safe( u'<span></span>'. join([escape(value[i:(i+break_after)]) for i in range(0,len(value),break_after)]) ) @register.filter def is_url(value): if not isinstance(value,six.string_types): return False if not value.startswith(('http://','https://')): return False if ' ' in value: return False return True # seriously Django? @register.filter def subtract(value,amount): return int(value)-int(amount) @register.filter def absolute_value(value): return abs(int(value) if isinstance(value,six.integer_types) else float(value)) @register.filter def has_charts(group): from launcher.utils.db import has_charts if hasattr(group,'_state'): db=group._state.db or 'default' else: db='default' return has_charts(db) @register.filter def as_sorted(value): return sorted(value) @register.filter def small_count(v,precision=1): if not v: return 0 z=[ (1000000000,_('b')), (1000000,_('m')), (1000,_('k')), ] v=int(v) for x,y in z: o,p=divmod(v,x) if o: if len(six.text_type(o))>2 or not p: return '%d%s'%(o,y) return ('%.{}f%s'.format(precision))%(v/float(x),y) return v @register.filter def num_digits(value): return len(six.text_type(value)) @register.filter def to_str(data): return six.text_type(data) @register.filter def is_none(value): return value is None @register.filter def timesince(value,now=None): from django.template.defaultfilters import timesince if now is None: now=timezone.now() if not value: return _('never') if value<(now-timedelta(days=5)): return value.date() value=(' '.join(timesince(value,now).split(' ')[0:2])).strip(',') if value==_('0 minutes'): return _('just now') if value==_('1 day'): return _('yesterday') return value+_(' ago') @register.filter def duration(value): if not value: return '0s' # value is assumed to be in ms value=value/1000.0 hours,minutes,seconds=0,0,0 if value>3600: hours=value/3600 value=value%3600 if value>60: minutes=value/60 value=value%60 seconds=value output=[] if hours: output.append('%dh'%hours) if minutes: output.append('%dm'%minutes) if seconds>1: output.append('%0.2fs'%seconds) elif seconds: output.append('%dms'%(seconds*1000)) return ''.join(output) @register.filter def date(dt,arg=None): 
from django.template.defaultfilters import date if not timezone.is_aware(dt): dt=dt.replace(tzinfo=timezone.utc) return date(dt,arg) @register.filter def trim_schema(value): return value.split('//',1)[-1] @register.filter def with_metadata(group_list,request): group_list=list(group_list) if request.user.is_authenticated() and group_list: project=group_list[0].project bookmarks=set( project.bookmark_set.filter( user=request.user, group__in=group_list, ).values_list('group_id',flat=True) ) else: bookmarks=set() # TODO(dcramer): this is obsolete and needs to pull from the tsdb backend historical_data={} for g in group_list: yield g,{ 'is_bookmarked':g.pk in bookmarks, 'historical_data':','.join(six.text_type(x[1]) for x in historical_data.get(g.id,[])), } @register.simple_tag def percent(value,total,format=None): if not (value and total): result=0 else: result=int(value)/float(total)*100 if format is None: return int(result) else: return ('%%%s'%format)%result @register.filter def titlize(value): return value.replace('_',' ').title() @register.filter def split(value,delim=''): return value.split(delim) @register.inclusion_tag('sentry/partial/github_button.html') def github_button(user,repo): return { 'user':user, 'repo':repo, } @register.filter def urlquote(value,safe=''): return quote(value.encode('utf8'),safe) @register.filter def basename(value): return os.path.basename(value) @register.filter def user_display_name(user): return user.name or user.username @register.simple_tag(takes_context=True) def localized_datetime(context,dt,format='DATETIME_FORMAT'): request=context['request'] timezone=getattr(request,'timezone',None) if not timezone: timezone=pytz.timezone(settings.SENTRY_DEFAULT_TIME_ZONE) dt=dt.astimezone(timezone) return date(dt,format) @register.filter def format_userinfo(user): parts=user.username.split('@') if len(parts)==1: username=user.username else: username=parts[0].lower() return mark_safe('<span title="%s">%s</span>'%(escape(user.username),escape(username),)) @register.filter def soft_break(value,length): return _soft_break( value, length, functools.partial(soft_hyphenate,length=max(length//10,10)), )
license: mit
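To make the duration filter's unit handling concrete, here is a standalone re-statement of its milliseconds-to-string logic (the helper name is hypothetical, not part of the module):

    # Hedged sketch of the duration filter's logic: input is milliseconds,
    # output is a compact 'XhYmZ.ZZs' / 'Nms' string.
    def format_duration_ms(ms):
        if not ms:
            return '0s'
        value = ms / 1000.0            # convert ms to seconds
        hours = minutes = 0
        if value > 3600:
            hours, value = divmod(value, 3600)
        if value > 60:
            minutes, value = divmod(value, 60)
        parts = []
        if hours:
            parts.append('%dh' % hours)
        if minutes:
            parts.append('%dm' % minutes)
        if value > 1:
            parts.append('%0.2fs' % value)
        elif value:
            parts.append('%dms' % (value * 1000))
        return ''.join(parts)

    print(format_duration_ms(3723000))  # '1h2m3.00s'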
repo_name: zubron/servo | path: components/script/dom/bindings/codegen/BindingGen.py | copies: 150 | size: 1729
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys
import os
sys.path.append(os.path.join(".", "parser"))
sys.path.append(os.path.join(".", "ply"))
import cPickle
from Configuration import Configuration
from CodegenRust import CGBindingRoot, replaceFileIfChanged

def generate_binding_rs(config, outputprefix, webidlfile):
    """
    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    """
    filename = outputprefix + ".rs"
    module = CGBindingRoot(config, outputprefix, webidlfile).define()
    if not module:
        print "Skipping empty module: %s" % (filename)
    elif replaceFileIfChanged(filename, module):
        print "Generating binding implementation: %s" % (filename)

def main():
    # Parse arguments.
    from optparse import OptionParser
    usagestring = "usage: %prog configFile outputdir outputPrefix webIDLFile"
    o = OptionParser(usage=usagestring)
    (options, args) = o.parse_args()

    if len(args) != 4:
        o.error(usagestring)
    configFile = os.path.normpath(args[0])
    outputdir = args[1]
    outputPrefix = args[2]
    webIDLFile = os.path.normpath(args[3])

    # Load the parsing results
    resultsPath = os.path.join(outputdir, 'ParserResults.pkl')
    with open(resultsPath, 'rb') as f:
        parserData = cPickle.load(f)

    # Create the configuration data.
    config = Configuration(configFile, parserData)

    # Generate the prototype classes.
    generate_binding_rs(config, outputPrefix, webIDLFile)

if __name__ == '__main__':
    main()
license: mpl-2.0
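The script only rewrites its output when the generated content actually changes, which keeps file timestamps stable for incremental builds. A hedged, self-contained analogue of that replaceFileIfChanged behaviour (the real helper lives in CodegenRust; this name is illustrative):

    import os

    def replace_file_if_changed(path, new_contents):
        # Returns True only when the file was actually (re)written, so
        # callers can skip downstream work for unchanged outputs.
        if os.path.exists(path):
            with open(path) as f:
                if f.read() == new_contents:
                    return False
        with open(path, 'w') as f:
            f.write(new_contents)
        return True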
repo_name: louyihua/edx-platform | path: lms/djangoapps/survey/migrations/0001_initial.py | copies: 50 | size: 2289
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import django.utils.timezone
from django.conf import settings
import model_utils.fields
import xmodule_django.models


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='SurveyAnswer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('field_name', models.CharField(max_length=255, db_index=True)),
                ('field_value', models.CharField(max_length=1024)),
                ('course_key', xmodule_django.models.CourseKeyField(max_length=255, null=True, db_index=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='SurveyForm',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('name', models.CharField(unique=True, max_length=255, db_index=True)),
                ('form', models.TextField()),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='surveyanswer',
            name='form',
            field=models.ForeignKey(to='survey.SurveyForm'),
        ),
        migrations.AddField(
            model_name='surveyanswer',
            name='user',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
        ),
    ]
license: agpl-3.0
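For orientation, the tables this migration creates correspond roughly to the following model declarations (a hedged sketch; the real models live in the survey app, and TimeStampedModel from django-model-utils is the usual source of the created/modified pair generated above):

    from django.conf import settings
    from django.db import models
    from model_utils.models import TimeStampedModel
    from xmodule_django.models import CourseKeyField

    class SurveyForm(TimeStampedModel):
        name = models.CharField(max_length=255, unique=True, db_index=True)
        form = models.TextField()

    class SurveyAnswer(TimeStampedModel):
        field_name = models.CharField(max_length=255, db_index=True)
        field_value = models.CharField(max_length=1024)
        course_key = CourseKeyField(max_length=255, null=True, db_index=True)
        form = models.ForeignKey(SurveyForm)
        user = models.ForeignKey(settings.AUTH_USER_MODEL)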
repo_name: scue/vim-ycm_win7 | path: third_party/requests/requests/packages/urllib3/connectionpool.py | copies: 223 | size: 25767
# urllib3/connectionpool.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import errno
import logging

from socket import error as SocketError, timeout as SocketTimeout
import socket

try:  # Python 3
    from queue import LifoQueue, Empty, Full
except ImportError:
    from Queue import LifoQueue, Empty, Full
    import Queue as _  # Platform-specific: Windows

from .exceptions import (
    ClosedPoolError,
    ConnectTimeoutError,
    EmptyPoolError,
    HostChangedError,
    MaxRetryError,
    SSLError,
    TimeoutError,
    ReadTimeoutError,
    ProxyError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .connection import (
    port_by_scheme,
    DummyConnection,
    HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
    HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
from .util import (
    assert_fingerprint,
    get_host,
    is_connection_dropped,
    Timeout,
)

xrange = six.moves.xrange

log = logging.getLogger(__name__)

_Default = object()

## Pool objects

class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        # httplib doesn't like it when we include brackets in ipv6 addresses
        host = host.strip('[]')

        self.host = host
        self.port = port

    def __str__(self):
        return '%s(host=%r, port=%r)' % (type(self).__name__,
                                         self.host, self.port)

# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])

class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to false, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`"

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`"
    """

    scheme = 'http'
    ConnectionCls = HTTPConnection

    def __init__(self, host, port=None, strict=False,
                 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
                 headers=None, _proxy=None, _proxy_headers=None):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        self.strict = strict

        # This is for backwards compatibility and can be removed once a timeout
        # can only be set to a Timeout object
        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        self.timeout = timeout

        self.pool = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}

        # Fill the queue up so that doing get() on it will block properly
        for _ in xrange(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0

    def _new_conn(self):
        """
        Return a fresh :class:`HTTPConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTP connection (%d): %s" %
                 (self.num_connections, self.host))

        extra_params = {}
        if not six.PY3:  # Python 2
            extra_params['strict'] = self.strict

        conn = self.ConnectionCls(host=self.host, port=self.port,
                                  timeout=self.timeout.connect_timeout,
                                  **extra_params)
        if self.proxy is not None:
            # Enable Nagle's algorithm for proxies, to avoid packet
            # fragmentation.
            conn.tcp_nodelay = 0
        return conn

    def _get_conn(self, timeout=None):
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError:  # self.pool is None
            raise ClosedPoolError(self, "Pool is closed.")

        except Empty:
            if self.block:
                raise EmptyPoolError(self,
                                     "Pool reached maximum size and no more "
                                     "connections are allowed.")
            pass  # Oh well, we'll create a new connection then

        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.info("Resetting dropped connection: %s" % self.host)
            conn.close()

        return conn or self._new_conn()

    def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning("HttpConnectionPool is full, discarding connection: %s"
                        % self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()

    def _get_timeout(self, timeout):
        """ Helper that always returns a :class:`urllib3.util.Timeout` """
        if timeout is _Default:
            return self.timeout.clone()

        if isinstance(timeout, Timeout):
            return timeout.clone()
        else:
            # User passed us an int/float. This is for backwards compatibility,
            # can be removed later
            return Timeout.from_float(timeout)

    def _make_request(self, conn, method, url, timeout=_Default,
                      **httplib_request_kw):
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)

        try:
            timeout_obj.start_connect()
            conn.timeout = timeout_obj.connect_timeout
            # conn.request() calls httplib.*.request, not the method in
            # urllib3.request. It also calls makefile (recv) on the socket.
            conn.request(method, url, **httplib_request_kw)
        except SocketTimeout:
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, timeout_obj.connect_timeout))

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        # App Engine doesn't have a sock attr
        if hasattr(conn, 'sock'):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url,
                    "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)

        # Receive the response from the server
        try:
            try:  # Python 2.7+, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older
                httplib_response = conn.getresponse()
        except SocketTimeout:
            raise ReadTimeoutError(
                self, url, "Read timed out. (read timeout=%s)" % read_timeout)

        except BaseSSLError as e:
            # Catch possible read timeouts thrown as SSL errors. If not the
            # case, rethrow the original. We need to do this because of:
            # http://bugs.python.org/issue10272
            if 'timed out' in str(e) or \
               'did not complete (read)' in str(e):  # Python 2.6
                raise ReadTimeoutError(self, url, "Read timed out.")

            raise

        except SocketError as e:  # Platform-specific: Python 2
            # See the above comment about EAGAIN in Python 3. In Python 2 we
            # have to specifically catch it and throw the timeout error
            if e.errno in _blocking_errnos:
                raise ReadTimeoutError(
                    self, url,
                    "Read timed out. (read timeout=%s)" % read_timeout)

            raise

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
                                          httplib_response.status,
                                          httplib_response.length))
        return httplib_response

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        try:
            while True:
                conn = old_pool.get(block=False)
                if conn:
                    conn.close()

        except Empty:
            pass  # Done.

    def is_same_host(self, url):
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        """
        if url.startswith('/'):
            return True

        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, host, port = get_host(url)

        # Use explicit default port for comparison when none is given
        if self.port and not port:
            port = port_by_scheme.get(scheme)
        elif not self.port and port == port_by_scheme.get(scheme):
            port = None

        return (scheme, host, port) == (self.scheme, self.host, self.port)

    def urlopen(self, method, url, body=None, headers=None, retries=3,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, **response_kw):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Number of retries to allow before raising a MaxRetryError exception.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one request.
            It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param \**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        if headers is None:
            headers = self.headers

        if retries < 0:
            raise MaxRetryError(self, url)

        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries - 1)

        conn = None

        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)

        try:
            # Request a connection from the queue
            conn = self._get_conn(timeout=pool_timeout)

            # Make the request on the httplib connection object
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout,
                                                  body=body, headers=headers)

            # If we're going to release the connection in ``finally:``, then
            # the request doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = not release_conn and conn

            # Import httplib's response into our own wrapper object
            response = HTTPResponse.from_httplib(httplib_response,
                                                 pool=self,
                                                 connection=response_conn,
                                                 **response_kw)

            # else:
            #     The connection will be put back into the pool when
            #     ``response.release_conn()`` is called (implicitly by
            #     ``response.read()``)

        except Empty:
            # Timed out by queue
            raise EmptyPoolError(self, "No pool connections are available.")

        except BaseSSLError as e:
            raise SSLError(e)

        except CertificateError as e:
            # Name mismatch
            raise SSLError(e)

        except TimeoutError as e:
            # Connection broken, discard.
            conn = None
            # Save the error off for retry logic.
            err = e

            if retries == 0:
                raise

        except (HTTPException, SocketError) as e:
            # Connection broken, discard. It will be replaced next _get_conn().
            conn = None
            # This is necessary so we can access e below
            err = e

            if retries == 0:
                if isinstance(e, SocketError) and self.proxy is not None:
                    raise ProxyError('Cannot connect to proxy. '
                                     'Socket error: %s.' % e)
                else:
                    raise MaxRetryError(self, url, e)

        finally:
            if release_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warn("Retrying (%d attempts remain) after connection "
                     "broken by '%r': %s" % (retries, err, url))
            return self.urlopen(method, url, body, headers, retries - 1,
                                redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                method = 'GET'
            log.info("Redirecting %s -> %s" % (url, redirect_location))
            return self.urlopen(method, redirect_location, body, headers,
                                retries - 1, redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        return response

class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and
    ``ssl_version`` are only used if :mod:`ssl` is available and are fed into
    :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket
    into an SSL socket.
    """

    scheme = 'https'
    ConnectionCls = HTTPSConnection

    def __init__(self, host, port=None,
                 strict=False, timeout=None, maxsize=1,
                 block=False, headers=None,
                 _proxy=None, _proxy_headers=None,
                 key_file=None, cert_file=None, cert_reqs=None,
                 ca_certs=None, ssl_version=None,
                 assert_hostname=None, assert_fingerprint=None):

        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
                                    block, headers, _proxy, _proxy_headers)
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """

        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(key_file=self.key_file,
                          cert_file=self.cert_file,
                          cert_reqs=self.cert_reqs,
                          ca_certs=self.ca_certs,
                          assert_hostname=self.assert_hostname,
                          assert_fingerprint=self.assert_fingerprint)
            conn.ssl_version = self.ssl_version

        if self.proxy is not None:
            # Python 2.7+
            try:
                set_tunnel = conn.set_tunnel
            except AttributeError:  # Platform-specific: Python 2.6
                set_tunnel = conn._set_tunnel
            set_tunnel(self.host, self.port, self.proxy_headers)
            # Establish tunnel connection early, because otherwise httplib
            # would improperly set Host: header to proxy's IP:port.
            conn.connect()

        return conn

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTPS connection (%d): %s"
                 % (self.num_connections, self.host))

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            # Platform-specific: Python without ssl
            raise SSLError("Can't connect to HTTPS URL because the SSL "
                           "module is not available.")

        actual_host = self.host
        actual_port = self.port
        if self.proxy is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        extra_params = {}
        if not six.PY3:  # Python 2
            extra_params['strict'] = self.strict

        conn = self.ConnectionCls(host=actual_host, port=actual_port,
                                  timeout=self.timeout.connect_timeout,
                                  **extra_params)
        if self.proxy is not None:
            # Enable Nagle's algorithm for proxies, to avoid packet
            # fragmentation.
            conn.tcp_nodelay = 0

        return self._prepare_conn(conn)

def connection_from_url(url, **kw):
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example: ::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    if scheme == 'https':
        return HTTPSConnectionPool(host, port=port, **kw)
    else:
        return HTTPConnectionPool(host, port=port, **kw)
license: gpl-3.0
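The module's own docstring example extends naturally to pooled reuse; a short usage sketch (host and paths are illustrative):

    # Hedged usage sketch for the pool API above; one pool per host,
    # connections are reused across requests.
    from urllib3.connectionpool import HTTPConnectionPool, connection_from_url

    pool = HTTPConnectionPool('example.com', maxsize=2, block=True)
    r = pool.urlopen('GET', '/')   # lowest-level call
    print(r.status)

    conn = connection_from_url('http://example.com/')  # scheme-aware shortcut
    r = conn.request('GET', '/')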
repo_name: kevclarx/ansible | path: lib/ansible/template/vars.py | copies: 35 | size: 3911
# (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from jinja2.utils import missing

from ansible.module_utils.six import iteritems
from ansible.module_utils._text import to_native

__all__ = ['AnsibleJ2Vars']

class AnsibleJ2Vars:
    '''
    Helper class to template all variable content before jinja2 sees it. This
    is done by hijacking the variable storage that jinja2 uses, and overriding
    __contains__ and __getitem__ to look like a dict. Added bonus is avoiding
    duplicating the large hashes that inject tends to be.

    To facilitate using builtin jinja2 things like range, globals are also
    handled here.
    '''

    def __init__(self, templar, globals, locals=None, *extras):
        '''
        Initializes this object with a valid Templar() object, as well as
        several dictionaries of variables representing different scopes
        (in jinja2 terminology).
        '''

        self._templar = templar
        self._globals = globals
        self._extras = extras
        self._locals = dict()
        if isinstance(locals, dict):
            for key, val in iteritems(locals):
                if val is not missing:
                    if key[:2] == 'l_':
                        self._locals[key[2:]] = val
                    elif key not in ('context', 'environment', 'template'):
                        self._locals[key] = val

    def __contains__(self, k):
        if k in self._templar._available_variables:
            return True
        if k in self._locals:
            return True
        for i in self._extras:
            if k in i:
                return True
        if k in self._globals:
            return True
        return False

    def __getitem__(self, varname):
        if varname not in self._templar._available_variables:
            if varname in self._locals:
                return self._locals[varname]
            for i in self._extras:
                if varname in i:
                    return i[varname]
            if varname in self._globals:
                return self._globals[varname]
            else:
                raise KeyError("undefined variable: %s" % varname)

        variable = self._templar._available_variables[varname]

        # HostVars is special, return it as-is, as is the special variable
        # 'vars', which contains the vars structure
        from ansible.vars.hostvars import HostVars
        if isinstance(variable, dict) and varname == "vars" or isinstance(variable, HostVars) or hasattr(variable, '__UNSAFE__'):
            return variable
        else:
            value = None
            try:
                value = self._templar.template(variable)
            except Exception as e:
                raise type(e)(to_native(variable) + ': ' + e.message)
            return value

    def add_locals(self, locals):
        '''
        If locals are provided, create a copy of self containing those
        locals in addition to what is already in this variable proxy.
        '''
        if locals is None:
            return self
        return AnsibleJ2Vars(self._templar, self._globals, locals=locals, *self._extras)
license: gpl-3.0
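The class's core trick is standing in for Jinja2's variable storage by implementing just enough of the mapping protocol; a stripped-down illustration of that proxy pattern (names here are hypothetical, not Ansible API):

    # Hedged sketch: a lazy dict-like proxy in the spirit of AnsibleJ2Vars,
    # where values are transformed only when actually looked up.
    class LazyTemplateVars(object):
        def __init__(self, variables, render):
            self._variables = variables  # raw variable store
            self._render = render        # callable applied on access

        def __contains__(self, key):
            return key in self._variables

        def __getitem__(self, key):
            if key not in self._variables:
                raise KeyError("undefined variable: %s" % key)
            return self._render(self._variables[key])

    v = LazyTemplateVars({'greeting': 'hi {{ name }}'}, render=str.upper)
    print(v['greeting'])  # 'HI {{ NAME }}', transformed at access time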
repo_name: admcrae/tensorflow | path: tensorflow/contrib/keras/python/keras/__init__.py | copies: 29 | size: 1864
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Keras API.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib.keras.python.keras import activations
from tensorflow.contrib.keras.python.keras import applications
from tensorflow.contrib.keras.python.keras import backend
from tensorflow.contrib.keras.python.keras import callbacks
from tensorflow.contrib.keras.python.keras import constraints
from tensorflow.contrib.keras.python.keras import datasets
from tensorflow.contrib.keras.python.keras import engine
from tensorflow.contrib.keras.python.keras import initializers
from tensorflow.contrib.keras.python.keras import layers
from tensorflow.contrib.keras.python.keras import losses
from tensorflow.contrib.keras.python.keras import metrics
from tensorflow.contrib.keras.python.keras import models
from tensorflow.contrib.keras.python.keras import optimizers
from tensorflow.contrib.keras.python.keras import preprocessing
from tensorflow.contrib.keras.python.keras import regularizers
from tensorflow.contrib.keras.python.keras import utils
from tensorflow.contrib.keras.python.keras import wrappers

__version__ = '2.0.2-tf'
license: apache-2.0
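A quick smoke test of this namespace as shipped in TF 1.x contrib (the import path is grounded in the file's own location; the Sequential usage assumes standard Keras):

    # Hedged example: importing the contrib-keras namespace above.
    from tensorflow.contrib.keras.python import keras

    print(keras.__version__)           # '2.0.2-tf' per the module
    model = keras.models.Sequential()  # submodules re-exported above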
repo_name: sasukeh/acos-client | path: acos_client/v21/device_info.py | copies: 4 | size: 1044
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import base

class DeviceInfo(base.BaseV21):

    def get(self, **kwargs):
        return self._get('system.device_info.get', **kwargs)

    def cpu_current_usage(self, **kwargs):
        return self._get('system.device_info.cpu.current_usage.get', **kwargs)

    def cpu_historical_usage(self, **kwargs):
        return self._get('system.device_info.cpu.historical_usage.get', **kwargs)
license: apache-2.0
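How these wrappers are reached from a client instance (the constructor arguments and attribute path below are assumptions about the acos-client API, not verified from this file alone):

    # Hedged sketch: each method above forwards an aXAPI v2.1 method string
    # to BaseV21._get; a typical call site might look like this.
    import acos_client

    c = acos_client.Client('10.10.10.10', acos_client.AXAPI_21,
                           'admin', 'a10')          # assumed signature
    print(c.system.device_info.get())
    print(c.system.device_info.cpu_current_usage())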
repo_name: vigneshkarthi/satireguru | path: satire-bot.py | copies: 1 | size: 3178
import twitter
import yaml
import time
import pickle
import re
import random

global match, api, msg, oldID
msg = ''

# RegEx for parsing twitter handle from retrieved tweets
keyword = ''
#UTF_CHARS = ur'a-z0-9_\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u00ff'
#TAG_EXP = ur'(^|[^0-9A-Z&/]+)(#|\uff03)([0-9A-Z_]*[A-Z_]+[%s]*)' % UTF_CHARS
#TAG_REGEX = re.compile(TAG_EXP, re.UNICODE | re.IGNORECASE)

# Performs OAuth authentication; place all the necessary keys in access.yaml
def authenticate():
    global api
    data = yaml.load(open("access.yaml"))
    api = twitter.Api(consumer_key=data['consumer-key'],
                      consumer_secret=data['consumer-secret'],
                      access_token_key=data['access-key'],
                      access_token_secret=data['access-secret'])

# Parses response.yaml to search and reply with relevant messages according to
# twitter handles; fill your responses in response.yaml
def choose_reply():
    global match, msg
    comments = yaml.load(open("response.yaml"))
    for name in comments['name']:
        if name['keyword'] == match:
            msg = random.choice(name['response'])

# Module which checks for mentions and replies to the mentioner and the person
# mentioned; current version supports only one mentioned person
def get_and_post_replies(old):
    cache_msg_to_post = ' '
    global match, api
    while 1:
        try:
            i = 0
            repl = api.GetMentions()
            total = len(repl)
            newID = int(repl[i].id)
            while newID != old:
                print repl[i].text + ", by @" + repl[i].user.screen_name
                if "pm" in repl[i].text.lower():
                    match = 'PM'
                    print "Match is", match
                    choose_reply()
                    msg_to_post = "@" + repl[i].user.screen_name + " " + msg
                    if msg_to_post == cache_msg_to_post:
                        # append a random suffix so twitter does not reject a
                        # duplicate status (was a str + int TypeError before)
                        msg_to_post = msg_to_post + str(random.randint(0, 1000))
                    cache_msg_to_post = msg_to_post
                    try:
                        api.PostUpdate(msg_to_post, in_reply_to_status_id=repl[i].id)
                        print "Msg posted is", msg_to_post
                        i = i + 1
                        if total == i:
                            break
                        newID = int(repl[i].id)
                    except twitter.TwitterError:
                        print "Something happened.. Saving ID's to file.. Not to Worry"
                        fileObj = open("idstore", 'r+')
                        old = repl[0].id
                        fileObj.seek(0)
                        fileObj.write(str(old))
                        fileObj.close()
                        return
                else:
                    i = i + 1
                    if total == i:
                        break
                    newID = int(repl[i].id)  # was `newId`, a typo that left the loop condition stale
            old = int(repl[0].id)
            print "No New Tweets !!"
            print "Gonna sleep for a minute :)"
            time.sleep(60)
        except KeyboardInterrupt:
            fileObj = open("idstore", 'r+')
            fileObj.seek(0)
            fileObj.write(str(old))
            print "Saving ID's to file.. Exiting!!"
            return

authenticate()
fileObj = open("idstore", 'r+')
old = fileObj.read()
old = int(old)
get_and_post_replies(old)
license: gpl-2.0
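choose_reply() expects response.yaml to hold a top-level name list of keyword/response entries; a hedged sketch of a matching file plus the lookup it drives (the YAML contents are illustrative):

    # response.yaml (illustrative contents)
    # name:
    #   - keyword: PM
    #     response:
    #       - "Reply one"
    #       - "Reply two"

    import random
    import yaml

    comments = yaml.load(open("response.yaml"))
    match = 'PM'
    for name in comments['name']:
        if name['keyword'] == match:
            print(random.choice(name['response']))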
repo_name: fuselock/odoo | path: addons/mrp_repair/mrp_repair.py | copies: 148 | size: 36935
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv
from datetime import datetime
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp

class mrp_repair(osv.osv):
    _name = 'mrp.repair'
    _inherit = 'mail.thread'
    _description = 'Repair Order'

    def _amount_untaxed(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates untaxed amount.
        @param self: The object pointer
        @param cr: The current row, from the database cursor,
        @param uid: The current user ID for security checks
        @param ids: List of selected IDs
        @param field_name: Name of field.
        @param arg: Argument
        @param context: A standard dictionary for contextual values
        @return: Dictionary of values.
        """
        res = {}
        cur_obj = self.pool.get('res.currency')

        for repair in self.browse(cr, uid, ids, context=context):
            res[repair.id] = 0.0
            for line in repair.operations:
                res[repair.id] += line.price_subtotal
            for line in repair.fees_lines:
                res[repair.id] += line.price_subtotal
            cur = repair.pricelist_id.currency_id
            res[repair.id] = cur_obj.round(cr, uid, cur, res[repair.id])
        return res

    def _amount_tax(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates taxed amount.
        @param field_name: Name of field.
        @param arg: Argument
        @return: Dictionary of values.
        """
        res = {}
        #return {}.fromkeys(ids, 0)
        cur_obj = self.pool.get('res.currency')
        tax_obj = self.pool.get('account.tax')
        for repair in self.browse(cr, uid, ids, context=context):
            val = 0.0
            cur = repair.pricelist_id.currency_id
            for line in repair.operations:
                #manage prices with tax included use compute_all instead of compute
                if line.to_invoice:
                    tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, repair.partner_id)
                    for c in tax_calculate['taxes']:
                        val += c['amount']
            for line in repair.fees_lines:
                if line.to_invoice:
                    tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, repair.partner_id)
                    for c in tax_calculate['taxes']:
                        val += c['amount']
            res[repair.id] = cur_obj.round(cr, uid, cur, val)
        return res

    def _amount_total(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates total amount.
        @param field_name: Name of field.
        @param arg: Argument
        @return: Dictionary of values.
        """
        res = {}
        untax = self._amount_untaxed(cr, uid, ids, field_name, arg, context=context)
        tax = self._amount_tax(cr, uid, ids, field_name, arg, context=context)
        cur_obj = self.pool.get('res.currency')
        for id in ids:
            repair = self.browse(cr, uid, id, context=context)
            cur = repair.pricelist_id.currency_id
            res[id] = cur_obj.round(cr, uid, cur, untax.get(id, 0.0) + tax.get(id, 0.0))
        return res

    def _get_default_address(self, cr, uid, ids, field_name, arg, context=None):
        res = {}
        partner_obj = self.pool.get('res.partner')
        for data in self.browse(cr, uid, ids, context=context):
            adr_id = False
            if data.partner_id:
                adr_id = partner_obj.address_get(cr, uid, [data.partner_id.id], ['default'])['default']
            res[data.id] = adr_id
        return res

    def _get_lines(self, cr, uid, ids, context=None):
        return self.pool['mrp.repair'].search(cr, uid, [('operations', 'in', ids)], context=context)

    def _get_fee_lines(self, cr, uid, ids, context=None):
        return self.pool['mrp.repair'].search(cr, uid, [('fees_lines', 'in', ids)], context=context)

    _columns = {
        'name': fields.char('Repair Reference', required=True, states={'confirmed': [('readonly', True)]}, copy=False),
        'product_id': fields.many2one('product.product', string='Product to Repair', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'product_qty': fields.float('Product Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'partner_id': fields.many2one('res.partner', 'Partner', select=True, help='Choose partner for whom the order will be invoiced and delivered.', states={'confirmed': [('readonly', True)]}),
        'address_id': fields.many2one('res.partner', 'Delivery Address', domain="[('parent_id','=',partner_id)]", states={'confirmed': [('readonly', True)]}),
        'default_address_id': fields.function(_get_default_address, type="many2one", relation="res.partner"),
        'state': fields.selection([
            ('draft', 'Quotation'),
            ('cancel', 'Cancelled'),
            ('confirmed', 'Confirmed'),
            ('under_repair', 'Under Repair'),
            ('ready', 'Ready to Repair'),
            ('2binvoiced', 'To be Invoiced'),
            ('invoice_except', 'Invoice Exception'),
            ('done', 'Repaired')
            ], 'Status', readonly=True, track_visibility='onchange', copy=False,
            help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed repair order. \
            \n* The \'Confirmed\' status is used when a user confirms the repair order. \
            \n* The \'Ready to Repair\' status is used to start to repairing, user can start repairing only after repair order is confirmed. \
            \n* The \'To be Invoiced\' status is used to generate the invoice before or after repairing done. \
            \n* The \'Done\' status is set when repairing is completed.\
            \n* The \'Cancelled\' status is used when user cancel repair order.'),
        'location_id': fields.many2one('stock.location', 'Current Location', select=True, required=True, readonly=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
        'location_dest_id': fields.many2one('stock.location', 'Delivery Location', readonly=True, required=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
        'lot_id': fields.many2one('stock.production.lot', 'Repaired Lot', domain="[('product_id','=', product_id)]", help="Products repaired are all belonging to this lot", oldname="prodlot_id"),
        'guarantee_limit': fields.date('Warranty Expiration', states={'confirmed': [('readonly', True)]}),
        'operations': fields.one2many('mrp.repair.line', 'repair_id', 'Operation Lines', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
        'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', help='Pricelist of the selected partner.'),
        'partner_invoice_id': fields.many2one('res.partner', 'Invoicing Address'),
        'invoice_method': fields.selection([
            ("none", "No Invoice"),
            ("b4repair", "Before Repair"),
            ("after_repair", "After Repair")
            ], "Invoice Method", select=True, required=True, states={'draft': [('readonly', False)]}, readonly=True,
            help='Selecting \'Before Repair\' or \'After Repair\' will allow you to generate invoice before or after the repair is done respectively. \'No invoice\' means you don\'t want to generate invoice for this repair order.'),
        'invoice_id': fields.many2one('account.invoice', 'Invoice', readonly=True, track_visibility="onchange", copy=False),
        'move_id': fields.many2one('stock.move', 'Move', readonly=True, help="Move created by the repair order", track_visibility="onchange", copy=False),
        'fees_lines': fields.one2many('mrp.repair.fee', 'repair_id', 'Fees', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
        'internal_notes': fields.text('Internal Notes'),
        'quotation_notes': fields.text('Quotation Notes'),
        'company_id': fields.many2one('res.company', 'Company'),
        'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
        'repaired': fields.boolean('Repaired', readonly=True, copy=False),
        'amount_untaxed': fields.function(_amount_untaxed, string='Untaxed Amount', store={
            'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
            'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
            'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
        }),
        'amount_tax': fields.function(_amount_tax, string='Taxes', store={
            'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
            'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
            'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
        }),
        'amount_total': fields.function(_amount_total, string='Total', store={
            'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
            'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
            'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
        }),
    }

    def _default_stock_location(self, cr, uid, context=None):
        try:
            warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0')
            return warehouse.lot_stock_id.id
        except:
            return False

    _defaults = {
        'state': lambda *a: 'draft',
        'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'mrp.repair'),
        'invoice_method': lambda *a: 'none',
        'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'mrp.repair', context=context),
        'pricelist_id': lambda self, cr, uid, context: self.pool.get('product.pricelist').search(cr, uid, [('type', '=', 'sale')])[0],
        'product_qty': 1.0,
        'location_id': _default_stock_location,
    }

    _sql_constraints = [
        ('name', 'unique (name)', 'The name of the Repair Order must be unique!'),
    ]

    def onchange_product_id(self, cr, uid, ids, product_id=None):
        """ On change of product sets some values.
        @param product_id: Changed product
        @return: Dictionary of values.
        """
        product = False
        if product_id:
            product = self.pool.get("product.product").browse(cr, uid, product_id)
        return {'value': {
                    'guarantee_limit': False,
                    'lot_id': False,
                    'product_uom': product and product.uom_id.id or False,
                }
        }

    def onchange_product_uom(self, cr, uid, ids, product_id, product_uom, context=None):
        res = {'value': {}}
        if not product_uom or not product_id:
            return res
        product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
        uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
        if uom.category_id.id != product.uom_id.category_id.id:
            res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
            res['value'].update({'product_uom': product.uom_id.id})
        return res

    def onchange_location_id(self, cr, uid, ids, location_id=None):
        """ On change of location
        """
        return {'value': {'location_dest_id': location_id}}

    def button_dummy(self, cr, uid, ids, context=None):
        return True

    def onchange_partner_id(self, cr, uid, ids, part, address_id):
        """ On change of partner sets the values of partner address,
        partner invoice address and pricelist.
        @param part: Changed id of partner.
        @param address_id: Address id from current record.
        @return: Dictionary of values.
        """
        part_obj = self.pool.get('res.partner')
        pricelist_obj = self.pool.get('product.pricelist')
        if not part:
            return {'value': {
                        'address_id': False,
                        'partner_invoice_id': False,
                        'pricelist_id': pricelist_obj.search(cr, uid, [('type', '=', 'sale')])[0]
                    }
            }
        addr = part_obj.address_get(cr, uid, [part], ['delivery', 'invoice', 'default'])
        partner = part_obj.browse(cr, uid, part)
        pricelist = partner.property_product_pricelist and partner.property_product_pricelist.id or False
        return {'value': {
                    'address_id': addr['delivery'] or addr['default'],
                    'partner_invoice_id': addr['invoice'],
                    'pricelist_id': pricelist
                }
        }

    def action_cancel_draft(self, cr, uid, ids, *args):
        """ Cancels repair order when it is in 'Draft' state.
        @param *arg: Arguments
        @return: True
        """
        if not len(ids):
            return False
        mrp_line_obj = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids):
            mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'draft'})
        self.write(cr, uid, ids, {'state': 'draft'})
        return self.create_workflow(cr, uid, ids)

    def action_confirm(self, cr, uid, ids, *args):
        """ Repair order state is set to 'To be invoiced' when invoice method is
        'Before repair' else state becomes 'Confirmed'.
        @param *arg: Arguments
        @return: True
        """
        mrp_line_obj = self.pool.get('mrp.repair.line')
        for o in self.browse(cr, uid, ids):
            if (o.invoice_method == 'b4repair'):
                self.write(cr, uid, [o.id], {'state': '2binvoiced'})
            else:
                self.write(cr, uid, [o.id], {'state': 'confirmed'})
                for line in o.operations:
                    if line.product_id.track_production and not line.lot_id:
                        raise osv.except_osv(_('Warning!'), _("Serial number is required for operation line with product '%s'") % (line.product_id.name))
                mrp_line_obj.write(cr, uid, [l.id for l in o.operations], {'state': 'confirmed'})
        return True

    def action_cancel(self, cr, uid, ids, context=None):
        """ Cancels repair order.
        @return: True
        """
        mrp_line_obj = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids, context=context):
            if not repair.invoiced:
                mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'cancel'}, context=context)
            else:
                raise osv.except_osv(_('Warning!'), _('Repair order is already invoiced.'))
        return self.write(cr, uid, ids, {'state': 'cancel'})

    def wkf_invoice_create(self, cr, uid, ids, *args):
        self.action_invoice_create(cr, uid, ids)
        return True

    def action_invoice_create(self, cr, uid, ids, group=False, context=None):
        """ Creates invoice(s) for repair order.
        @param group: It is set to true when group invoice is to be generated.
        @return: Invoice Ids.
        """
        res = {}
        invoices_group = {}
        inv_line_obj = self.pool.get('account.invoice.line')
        inv_obj = self.pool.get('account.invoice')
        repair_line_obj = self.pool.get('mrp.repair.line')
        repair_fee_obj = self.pool.get('mrp.repair.fee')
        for repair in self.browse(cr, uid, ids, context=context):
            res[repair.id] = False
            if repair.state in ('draft', 'cancel') or repair.invoice_id:
                continue
            if not (repair.partner_id.id and repair.partner_invoice_id.id):
                raise osv.except_osv(_('No partner!'), _('You have to select a Partner Invoice Address in the repair form!'))
            comment = repair.quotation_notes
            if (repair.invoice_method != 'none'):
                if group and repair.partner_invoice_id.id in invoices_group:
                    inv_id = invoices_group[repair.partner_invoice_id.id]
                    invoice = inv_obj.browse(cr, uid, inv_id)
                    invoice_vals = {
                        'name': invoice.name + ', ' + repair.name,
                        'origin': invoice.origin + ', ' + repair.name,
                        'comment': (comment and (invoice.comment and invoice.comment + "\n" + comment or comment)) or (invoice.comment and invoice.comment or ''),
                    }
                    inv_obj.write(cr, uid, [inv_id], invoice_vals, context=context)
                else:
                    if not repair.partner_id.property_account_receivable:
                        raise osv.except_osv(_('Error!'), _('No account defined for partner "%s".') % repair.partner_id.name)
                    account_id = repair.partner_id.property_account_receivable.id
                    inv = {
                        'name': repair.name,
                        'origin': repair.name,
                        'type': 'out_invoice',
                        'account_id': account_id,
                        'partner_id': repair.partner_invoice_id.id or repair.partner_id.id,
                        'currency_id': repair.pricelist_id.currency_id.id,
                        'comment': repair.quotation_notes,
                        'fiscal_position': repair.partner_id.property_account_position.id
                    }
                    inv_id = inv_obj.create(cr, uid, inv)
                    invoices_group[repair.partner_invoice_id.id] = inv_id
                self.write(cr, uid, repair.id, {'invoiced': True, 'invoice_id': inv_id})

                for operation in repair.operations:
                    if operation.to_invoice:
                        if group:
                            name = repair.name + '-' + operation.name
                        else:
                            name = operation.name
                        if operation.product_id.property_account_income:
                            account_id = operation.product_id.property_account_income.id
                        elif operation.product_id.categ_id.property_account_income_categ:
                            account_id = operation.product_id.categ_id.property_account_income_categ.id
                        else:
                            raise osv.except_osv(_('Error!'), _('No account defined for product "%s".') % operation.product_id.name)
                        invoice_line_id = inv_line_obj.create(cr, uid, {
                            'invoice_id': inv_id,
                            'name': name,
                            'origin': repair.name,
                            'account_id': account_id,
                            'quantity': operation.product_uom_qty,
                            'invoice_line_tax_id': [(6, 0, [x.id for x in operation.tax_id])],
                            'uos_id': operation.product_uom.id,
                            'price_unit': operation.price_unit,
                            'price_subtotal': operation.product_uom_qty * operation.price_unit,
                            'product_id': operation.product_id and operation.product_id.id or False
                        })
                        repair_line_obj.write(cr, uid, [operation.id], {'invoiced': True, 'invoice_line_id': invoice_line_id})
                for fee in repair.fees_lines:
                    if fee.to_invoice:
                        if group:
                            name = repair.name + '-' + fee.name
                        else:
                            name = fee.name
                        if not fee.product_id:
                            raise osv.except_osv(_('Warning!'), _('No product defined on Fees!'))
                        if fee.product_id.property_account_income:
                            account_id = fee.product_id.property_account_income.id
                        elif fee.product_id.categ_id.property_account_income_categ:
                            account_id = fee.product_id.categ_id.property_account_income_categ.id
                        else:
                            raise osv.except_osv(_('Error!'), _('No account defined for product "%s".') % fee.product_id.name)
                        invoice_fee_id = inv_line_obj.create(cr, uid, {
                            'invoice_id': inv_id,
                            'name': name,
                            'origin': repair.name,
                            'account_id': account_id,
                            'quantity': fee.product_uom_qty,
                            'invoice_line_tax_id': [(6, 0, [x.id for x in fee.tax_id])],
                            'uos_id': fee.product_uom.id,
                            'product_id': fee.product_id and fee.product_id.id or False,
                            'price_unit': fee.price_unit,
                            'price_subtotal': fee.product_uom_qty * fee.price_unit
                        })
                        repair_fee_obj.write(cr, uid, [fee.id], {'invoiced': True, 'invoice_line_id': invoice_fee_id})
                inv_obj.button_reset_taxes(cr, uid, inv_id, context=context)
                res[repair.id] = inv_id
        return res

    def action_repair_ready(self, cr, uid, ids, context=None):
        """ Writes repair order state to 'Ready'
        @return: True
        """
        for repair in self.browse(cr, uid, ids, context=context):
            self.pool.get('mrp.repair.line').write(cr, uid, [l.id for l in repair.operations], {'state': 'confirmed'}, context=context)
            self.write(cr, uid, [repair.id], {'state': 'ready'})
        return True

    def action_repair_start(self, cr, uid, ids, context=None):
        """ Writes repair order state to 'Under Repair'
        @return: True
        """
        repair_line = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids, context=context):
            repair_line.write(cr, uid, [l.id for l in repair.operations], {'state': 'confirmed'}, context=context)
            repair.write({'state': 'under_repair'})
        return True

    def action_repair_end(self, cr, uid, ids, context=None):
        """ Writes repair order state to 'To be invoiced' if invoice method is
        After repair else state is set to 'Ready'.
        @return: True
        """
        for order in self.browse(cr, uid, ids, context=context):
            val = {}
            val['repaired'] = True
            if (not order.invoiced and order.invoice_method == 'after_repair'):
                val['state'] = '2binvoiced'
            elif (not order.invoiced and order.invoice_method == 'b4repair'):
                val['state'] = 'ready'
            else:
                pass
            self.write(cr, uid, [order.id], val)
        return True

    def wkf_repair_done(self, cr, uid, ids, *args):
        self.action_repair_done(cr, uid, ids)
        return True

    def action_repair_done(self, cr, uid, ids, context=None):
        """ Creates stock move for operation and stock move for final product of repair order.
        @return: Move ids of final products
        """
        res = {}
        move_obj = self.pool.get('stock.move')
        repair_line_obj = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids, context=context):
            move_ids = []
            for move in repair.operations:
                move_id = move_obj.create(cr, uid, {
                    'name': move.name,
                    'product_id': move.product_id.id,
                    'restrict_lot_id': move.lot_id.id,
                    'product_uom_qty': move.product_uom_qty,
                    'product_uom': move.product_uom.id,
                    'partner_id': repair.address_id and repair.address_id.id or False,
                    'location_id': move.location_id.id,
                    'location_dest_id': move.location_dest_id.id,
                })
                move_ids.append(move_id)
                repair_line_obj.write(cr, uid, [move.id], {'move_id': move_id, 'state': 'done'}, context=context)
            move_id = move_obj.create(cr, uid, {
                'name': repair.name,
                'product_id': repair.product_id.id,
                'product_uom': repair.product_uom.id or repair.product_id.uom_id.id,
                'product_uom_qty': repair.product_qty,
                'partner_id': repair.address_id and repair.address_id.id or False,
                'location_id': repair.location_id.id,
                'location_dest_id': repair.location_dest_id.id,
                'restrict_lot_id': repair.lot_id.id,
            })
            move_ids.append(move_id)
            move_obj.action_done(cr, uid, move_ids, context=context)
            self.write(cr, uid, [repair.id], {'state': 'done', 'move_id': move_id}, context=context)
            res[repair.id] = move_id
        return res

class ProductChangeMixin(object):
    def product_id_change(self, cr, uid, ids, pricelist, product, uom=False,
                          product_uom_qty=0, partner_id=False, guarantee_limit=False, context=None):
        """ On change of product it sets product quantity, tax account, name,
        uom of product, unit price and price subtotal.
        @param pricelist: Pricelist of current record.
        @param product: Changed id of product.
        @param uom: UoM of current record.
        @param product_uom_qty: Quantity of current record.
        @param partner_id: Partner of current record.
        @param guarantee_limit: Guarantee limit of current record.
        @return: Dictionary of values and warning message.
        """
        result = {}
        warning = {}
        ctx = context and context.copy() or {}
        ctx['uom'] = uom

        if not product_uom_qty:
            product_uom_qty = 1
        result['product_uom_qty'] = product_uom_qty

        if product:
            product_obj = self.pool.get('product.product').browse(cr, uid, product, context=ctx)
            if partner_id:
                partner = self.pool.get('res.partner').browse(cr, uid, partner_id)
                result['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, partner.property_account_position, product_obj.taxes_id, context=ctx)

            result['name'] = product_obj.display_name
            result['product_uom'] = product_obj.uom_id and product_obj.uom_id.id or False
            if not pricelist:
                warning = {
                    'title': _('No Pricelist!'),
                    'message': _('You have to select a pricelist in the Repair form !\n'
                                 'Please set one before choosing a product.')
                }
            else:
                price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
                                                                     product, product_uom_qty, partner_id, context=ctx)[pricelist]

                if price is False:
                    warning = {
                        'title': _('No valid pricelist line found !'),
                        'message': _("Couldn't find a pricelist line matching this product and quantity.\n"
                                     "You have to change either the product, the quantity or the pricelist.")
                    }
                else:
                    result.update({'price_unit': price, 'price_subtotal': price * product_uom_qty})

        return {'value': result, 'warning': warning}

class mrp_repair_line(osv.osv, ProductChangeMixin):
    _name = 'mrp.repair.line'
    _description = 'Repair Line'

    def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates amount.
        @param field_name: Name of field.
        @param arg: Argument
        @return: Dictionary of values.
        """
        res = {}
        tax_obj = self.pool.get('account.tax')
        cur_obj = self.pool.get('res.currency')
        for line in self.browse(cr, uid, ids, context=context):
            if line.to_invoice:
                taxes = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, line.repair_id.partner_id)
                cur = line.repair_id.pricelist_id.currency_id
                res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
            else:
                res[line.id] = 0
        return res

    _columns = {
        'name': fields.char('Description', required=True),
        'repair_id': fields.many2one('mrp.repair', 'Repair Order Reference', ondelete='cascade', select=True),
        'type': fields.selection([('add', 'Add'), ('remove', 'Remove')], 'Type', required=True),
        'to_invoice': fields.boolean('To Invoice'),
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
        'price_unit': fields.float('Unit Price', required=True, digits_compute=dp.get_precision('Product Price')),
        'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')),
        'tax_id': fields.many2many('account.tax', 'repair_operation_line_tax', 'repair_operation_line_id', 'tax_id', 'Taxes'),
        'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
        'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
        'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False),
        'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True),
        'location_dest_id': fields.many2one('stock.location', 'Dest. Location', required=True, select=True),
        'move_id': fields.many2one('stock.move', 'Inventory Move', readonly=True, copy=False),
        'lot_id': fields.many2one('stock.production.lot', 'Lot'),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('confirmed', 'Confirmed'),
            ('done', 'Done'),
            ('cancel', 'Cancelled')], 'Status', required=True, readonly=True, copy=False,
            help=' * The \'Draft\' status is set automatically as draft when repair order in draft status. \
            \n* The \'Confirmed\' status is set automatically as confirm when repair order in confirm status. \
            \n* The \'Done\' status is set automatically when repair order is completed.\
            \n* The \'Cancelled\' status is set automatically when user cancel repair order.'),
    }
    _defaults = {
        'state': lambda *a: 'draft',
        'product_uom_qty': lambda *a: 1,
    }

    def onchange_operation_type(self, cr, uid, ids, type, guarantee_limit, company_id=False, context=None):
        """ On change of operation type it sets source location, destination location
        and to invoice field.
        @param product: Changed operation type.
        @param guarantee_limit: Guarantee limit of current record.
        @return: Dictionary of values.
""" if not type: return {'value': { 'location_id': False, 'location_dest_id': False }} location_obj = self.pool.get('stock.location') warehouse_obj = self.pool.get('stock.warehouse') location_id = location_obj.search(cr, uid, [('usage', '=', 'production')], context=context) location_id = location_id and location_id[0] or False if type == 'add': # TOCHECK: Find stock location for user's company warehouse or # repair order's company's warehouse (company_id field is added in fix of lp:831583) args = company_id and [('company_id', '=', company_id)] or [] warehouse_ids = warehouse_obj.search(cr, uid, args, context=context) stock_id = False if warehouse_ids: stock_id = warehouse_obj.browse(cr, uid, warehouse_ids[0], context=context).lot_stock_id.id to_invoice = (guarantee_limit and datetime.strptime(guarantee_limit, '%Y-%m-%d') < datetime.now()) return {'value': { 'to_invoice': to_invoice, 'location_id': stock_id, 'location_dest_id': location_id }} scrap_location_ids = location_obj.search(cr, uid, [('scrap_location', '=', True)], context=context) return {'value': { 'to_invoice': False, 'location_id': location_id, 'location_dest_id': scrap_location_ids and scrap_location_ids[0] or False, }} class mrp_repair_fee(osv.osv, ProductChangeMixin): _name = 'mrp.repair.fee' _description = 'Repair Fees Line' def _amount_line(self, cr, uid, ids, field_name, arg, context=None): """ Calculates amount. @param field_name: Name of field. @param arg: Argument @return: Dictionary of values. """ res = {} tax_obj = self.pool.get('account.tax') cur_obj = self.pool.get('res.currency') for line in self.browse(cr, uid, ids, context=context): if line.to_invoice: taxes = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, line.repair_id.partner_id) cur = line.repair_id.pricelist_id.currency_id res[line.id] = cur_obj.round(cr, uid, cur, taxes['total']) else: res[line.id] = 0 return res _columns = { 'repair_id': fields.many2one('mrp.repair', 'Repair Order Reference', required=True, ondelete='cascade', select=True), 'name': fields.char('Description', select=True, required=True), 'product_id': fields.many2one('product.product', 'Product'), 'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True), 'price_unit': fields.float('Unit Price', required=True), 'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True), 'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')), 'tax_id': fields.many2many('account.tax', 'repair_fee_line_tax', 'repair_fee_line_id', 'tax_id', 'Taxes'), 'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False), 'to_invoice': fields.boolean('To Invoice'), 'invoiced': fields.boolean('Invoiced', readonly=True, copy=False), } _defaults = { 'to_invoice': lambda *a: True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
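Where the flattened OpenERP code above is hard to follow, the core of action_invoice_create() with group=True is a simple group-by-partner accumulation: repairs sharing an invoice partner are appended to one invoice instead of each getting their own. A minimal plain-Python sketch of that pattern (hypothetical data, not the ORM API):

def group_invoices(repairs):
    invoices_group = {}  # partner_id -> invoice (here: a plain dict)
    for repair in repairs:
        partner = repair['partner_invoice_id']
        if partner in invoices_group:
            # same partner: extend the existing invoice's origin
            inv = invoices_group[partner]
            inv['origin'] += ', ' + repair['name']
        else:
            # first repair for this partner: open a new invoice
            inv = {'origin': repair['name'], 'partner_id': partner}
            invoices_group[partner] = inv
    return list(invoices_group.values())

repairs = [
    {'name': 'RMA/001', 'partner_invoice_id': 7},
    {'name': 'RMA/002', 'partner_invoice_id': 7},
    {'name': 'RMA/003', 'partner_invoice_id': 9},
]
print(group_invoices(repairs))  # two invoices: one for partner 7, one for 9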
9miao/Firefly
gfirefly/server/server.py
6
4947
#coding:utf8
'''
Created on 2013-8-2

@author: lan (www.9miao.com)
'''
from gfirefly.netconnect.protoc import LiberateFactory
from flask import Flask
from gfirefly.distributed.root import PBRoot,BilateralFactory
from gfirefly.distributed.node import RemoteObject
from gfirefly.dbentrust.dbpool import dbpool
from gfirefly.dbentrust.memclient import mclient
from gfirefly.server.logobj import loogoo
from gfirefly.server.globalobject import GlobalObject
from gtwisted.utils import log
from gtwisted.core import reactor
from gfirefly.utils import services
import os,sys,affinity

reactor = reactor

def serverStop():
    """Stop the server process.
    """
    log.msg('stop')
    if GlobalObject().stophandler:
        GlobalObject().stophandler()
    reactor.callLater(0.5,reactor.stop)
    return True

class FFServer:
    """An abstraction of a single server process.
    """
    def __init__(self):
        '''
        '''
        self.netfactory = None  # net front end
        self.root = None  # distributed root node
        self.webroot = None  # http service
        self.remote = {}  # remote nodes
        self.master_remote = None
        self.db = None
        self.mem = None
        self.servername = None
        self.remoteportlist = []

    def config(self, config, servername=None, dbconfig=None,
               memconfig=None, masterconf=None):
        '''Configure the server.
        '''
        GlobalObject().json_config = config
        netport = config.get('netport')  # client connections
        webport = config.get('webport')  # http connections
        rootport = config.get('rootport')  # root node configuration
        self.remoteportlist = config.get('remoteport',[])  # remote node configuration list
        if not servername:
            servername = config.get('name')  # server name
        logpath = config.get('log')  # log path
        hasdb = config.get('db')  # database connection
        hasmem = config.get('mem')  # memcached connection
        app = config.get('app')  # entry module name
        cpuid = config.get('cpu')  # cpu to bind to
        mreload = config.get('reload')  # name of module to reload
        self.servername = servername

        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport,self.netfactory)

        if webport:
            self.webroot = Flask("servername")
            GlobalObject().webroot = self.webroot
            reactor.listenWSGI(webport, self.webroot)

        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))

        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername)

        if hasdb and dbconfig:
            log.msg(str(dbconfig))
            dbpool.initPool(**dbconfig)

        if hasmem and memconfig:
            urls = memconfig.get('urls')
            hostname = str(memconfig.get('hostname'))
            mclient.connect(urls, hostname)

        if logpath:
            log.addObserver(loogoo(logpath))  # log handling
        log.startLogging(sys.stdout)

        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)
        GlobalObject().config(netfactory = self.netfactory, root=self.root,
                              remote = self.remote)

        if masterconf:
            masterport = masterconf.get('rootport')
            masterhost = masterconf.get('roothost')
            self.master_remote = RemoteObject(servername)
            addr = ('localhost',masterport) if not masterhost else (masterhost,masterport)
            self.master_remote.connect(addr)
            GlobalObject().masterremote = self.master_remote

        import admin

        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload,fromlist=_path_list[:1])
        GlobalObject().remote_connect = self.remote_connect

    def remote_connect(self, rname, rhost):
        """Establish the rpc connection.
        """
        for cnf in self.remoteportlist:
            _rname = cnf.get('rootname')
            if rname == _rname:
                rport = cnf.get('rootport')
                if not rhost:
                    addr = ('localhost',rport)
                else:
                    addr = (rhost,rport)
                self.remote[rname].connect(addr)
                break

    def start(self):
        '''Start the server.
        '''
        log.msg('[%s] started...'%self.servername)
        log.msg('[%s] pid: %s'%(self.servername,os.getpid()))
        reactor.run()
mit
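FFServer.config() above reads a handful of keys from a dict-style configuration. A hypothetical configuration sketch, assuming gfirefly is installed and FFServer is importable from gfirefly.server.server; the key names come from the reads in config(), the values are illustrative only:

from gfirefly.server.server import FFServer

config = {
    'name': 'gateserver',      # server name
    'netport': 10000,          # client-facing TCP port
    'webport': 10010,          # HTTP (Flask/WSGI) port
    'rootport': 10020,         # distributed root node port
    'remoteport': [{'rootname': 'dbserver', 'rootport': 10030}],
    'log': './gateserver.log', # log file path
    'app': 'app.gateserver',   # entry module imported at startup
}

server = FFServer()
server.config(config, dbconfig=None, memconfig=None, masterconf=None)
server.start()  # blocks in reactor.run()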
RafaelTorrealba/odoo
openerp/addons/test_new_api/models.py
79
9125
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import datetime from openerp.exceptions import AccessError ############################################################################## # # OLD API # ############################################################################## from openerp.osv import osv, fields class res_partner(osv.Model): _inherit = 'res.partner' # # add related fields to test them # _columns = { # a regular one 'related_company_partner_id': fields.related( 'company_id', 'partner_id', type='many2one', obj='res.partner'), # a related field with a single field 'single_related_company_id': fields.related( 'company_id', type='many2one', obj='res.company'), # a related field with a single field that is also a related field! 'related_related_company_id': fields.related( 'single_related_company_id', type='many2one', obj='res.company'), } class TestFunctionCounter(osv.Model): _name = 'test_old_api.function_counter' def _compute_cnt(self, cr, uid, ids, fname, arg, context=None): res = {} for cnt in self.browse(cr, uid, ids, context=context): res[cnt.id] = cnt.access and cnt.cnt + 1 or 0 return res _columns = { 'access': fields.datetime('Datetime Field'), 'cnt': fields.function( _compute_cnt, type='integer', string='Function Field', store=True), } class TestFunctionNoInfiniteRecursion(osv.Model): _name = 'test_old_api.function_noinfiniterecursion' def _compute_f1(self, cr, uid, ids, fname, arg, context=None): res = {} for tf in self.browse(cr, uid, ids, context=context): res[tf.id] = 'create' in tf.f0 and 'create' or 'write' cntobj = self.pool['test_old_api.function_counter'] cnt_id = self.pool['ir.model.data'].xmlid_to_res_id( cr, uid, 'test_new_api.c1') cntobj.write( cr, uid, cnt_id, {'access': datetime.datetime.now()}, context=context) return res _columns = { 'f0': fields.char('Char Field'), 'f1': fields.function( _compute_f1, type='char', string='Function Field', store=True), } ############################################################################## # # NEW API # ############################################################################## from openerp import models, fields, api, _ class Category(models.Model): _name = 'test_new_api.category' name = fields.Char(required=True) parent = fields.Many2one('test_new_api.category') display_name = fields.Char(compute='_compute_display_name', inverse='_inverse_display_name') discussions = fields.Many2many('test_new_api.discussion', 'test_new_api_discussion_category', 'category', 'discussion') @api.one @api.depends('name', 'parent.display_name') # this definition is recursive def _compute_display_name(self): if self.parent: self.display_name = 
self.parent.display_name + ' / ' + self.name else: self.display_name = self.name @api.one def _inverse_display_name(self): names = self.display_name.split('/') # determine sequence of categories categories = [] for name in names[:-1]: category = self.search([('name', 'ilike', name.strip())]) categories.append(category[0]) categories.append(self) # assign parents following sequence for parent, child in zip(categories, categories[1:]): if parent and child: child.parent = parent # assign name of last category, and reassign display_name (to normalize it) self.name = names[-1].strip() def read(self, fields=None, load='_classic_read'): if self.search_count([('id', 'in', self._ids), ('name', '=', 'NOACCESS')]): raise AccessError('Sorry') return super(Category, self).read(fields, load) class Discussion(models.Model): _name = 'test_new_api.discussion' name = fields.Char(string='Title', required=True, help="General description of what this discussion is about.") moderator = fields.Many2one('res.users') categories = fields.Many2many('test_new_api.category', 'test_new_api_discussion_category', 'discussion', 'category') participants = fields.Many2many('res.users') messages = fields.One2many('test_new_api.message', 'discussion') message_changes = fields.Integer(string='Message changes') @api.onchange('moderator') def _onchange_moderator(self): self.participants |= self.moderator @api.onchange('messages') def _onchange_messages(self): self.message_changes = len(self.messages) class Message(models.Model): _name = 'test_new_api.message' discussion = fields.Many2one('test_new_api.discussion', ondelete='cascade') body = fields.Text() author = fields.Many2one('res.users', default=lambda self: self.env.user) name = fields.Char(string='Title', compute='_compute_name', store=True) display_name = fields.Char(string='Abstract', compute='_compute_display_name') size = fields.Integer(compute='_compute_size', search='_search_size') double_size = fields.Integer(compute='_compute_double_size') discussion_name = fields.Char(related='discussion.name') @api.one @api.constrains('author', 'discussion') def _check_author(self): if self.discussion and self.author not in self.discussion.participants: raise ValueError(_("Author must be among the discussion participants.")) @api.one @api.depends('author.name', 'discussion.name') def _compute_name(self): self.name = "[%s] %s" % (self.discussion.name or '', self.author.name or '') @api.one @api.depends('author.name', 'discussion.name', 'body') def _compute_display_name(self): stuff = "[%s] %s: %s" % (self.author.name, self.discussion.name or '', self.body or '') self.display_name = stuff[:80] @api.one @api.depends('body') def _compute_size(self): self.size = len(self.body or '') def _search_size(self, operator, value): if operator not in ('=', '!=', '<', '<=', '>', '>=', 'in', 'not in'): return [] # retrieve all the messages that match with a specific SQL query query = """SELECT id FROM "%s" WHERE char_length("body") %s %%s""" % \ (self._table, operator) self.env.cr.execute(query, (value,)) ids = [t[0] for t in self.env.cr.fetchall()] return [('id', 'in', ids)] @api.one @api.depends('size') def _compute_double_size(self): # This illustrates a subtle situation: self.double_size depends on # self.size. When size is computed, self.size is assigned, which should # normally invalidate self.double_size. However, this may not happen # while self.double_size is being computed: the last statement below # would fail, because self.double_size would be undefined. 
self.double_size = 0 size = self.size self.double_size = self.double_size + size class MixedModel(models.Model): _name = 'test_new_api.mixed' number = fields.Float(digits=(10, 2), default=3.14) date = fields.Date() now = fields.Datetime(compute='_compute_now') lang = fields.Selection(string='Language', selection='_get_lang') reference = fields.Reference(string='Related Document', selection='_reference_models') @api.one def _compute_now(self): # this is a non-stored computed field without dependencies self.now = fields.Datetime.now() @api.model def _get_lang(self): langs = self.env['res.lang'].search([]) return [(lang.code, lang.name) for lang in langs] @api.model def _reference_models(self): models = self.env['ir.model'].search([('state', '!=', 'manual')]) return [(model.model, model.name) for model in models if not model.model.startswith('ir.')]
agpl-3.0
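The recursive display_name compute/inverse pair above round-trips a category chain through an 'A / B / C' string: the compute joins the chain, the inverse splits on '/' to rebuild it. A plain-Python sketch of that round trip, outside the ORM:

def compute_display_name(chain):
    # join parent-to-child names the way _compute_display_name does
    return ' / '.join(chain)

def inverse_display_name(display_name):
    # split and strip the way _inverse_display_name does
    return [part.strip() for part in display_name.split('/')]

full = compute_display_name(['Sales', 'Europe', 'France'])
print(full)                        # 'Sales / Europe / France'
print(inverse_display_name(full))  # ['Sales', 'Europe', 'France']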
akhilari7/pa-dude
lib/python2.7/site-packages/django/db/migrations/state.py
31
25662
from __future__ import unicode_literals import copy from collections import OrderedDict from contextlib import contextmanager from django.apps import AppConfig from django.apps.registry import Apps, apps as global_apps from django.conf import settings from django.db import models from django.db.models.fields.proxy import OrderWrt from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT from django.db.models.options import DEFAULT_NAMES, normalize_together from django.db.models.utils import make_model_tuple from django.utils import six from django.utils.encoding import force_text, smart_text from django.utils.functional import cached_property from django.utils.module_loading import import_string from django.utils.version import get_docs_version from .exceptions import InvalidBasesError def _get_app_label_and_model_name(model, app_label=''): if isinstance(model, six.string_types): split = model.split('.', 1) return (tuple(split) if len(split) == 2 else (app_label, split[0])) else: return model._meta.app_label, model._meta.model_name def get_related_models_recursive(model): """ Returns all models that have a direct or indirect relationship to the given model. Relationships are either defined by explicit relational fields, like ForeignKey, ManyToManyField or OneToOneField, or by inheriting from another model (a superclass is related to its subclasses, but not vice versa). Note, however, that a model inheriting from a concrete model is also related to its superclass through the implicit *_ptr OneToOneField on the subclass. """ def _related_models(m): return [ f.related_model for f in m._meta.get_fields(include_parents=True, include_hidden=True) if f.is_relation and f.related_model is not None and not isinstance(f.related_model, six.string_types) ] + [ subclass for subclass in m.__subclasses__() if issubclass(subclass, models.Model) ] seen = set() queue = _related_models(model) for rel_mod in queue: rel_app_label, rel_model_name = rel_mod._meta.app_label, rel_mod._meta.model_name if (rel_app_label, rel_model_name) in seen: continue seen.add((rel_app_label, rel_model_name)) queue.extend(_related_models(rel_mod)) return seen - {(model._meta.app_label, model._meta.model_name)} class ProjectState(object): """ Represents the entire project's overall state. This is the item that is passed around - we do it here rather than at the app level so that cross-app FKs/etc. resolve properly. 
""" def __init__(self, models=None, real_apps=None): self.models = models or {} # Apps to include from main registry, usually unmigrated ones self.real_apps = real_apps or [] def add_model(self, model_state): app_label, model_name = model_state.app_label, model_state.name_lower self.models[(app_label, model_name)] = model_state if 'apps' in self.__dict__: # hasattr would cache the property self.reload_model(app_label, model_name) def remove_model(self, app_label, model_name): del self.models[app_label, model_name] if 'apps' in self.__dict__: # hasattr would cache the property self.apps.unregister_model(app_label, model_name) # Need to do this explicitly since unregister_model() doesn't clear # the cache automatically (#24513) self.apps.clear_cache() def reload_model(self, app_label, model_name): if 'apps' in self.__dict__: # hasattr would cache the property try: old_model = self.apps.get_model(app_label, model_name) except LookupError: related_models = set() else: # Get all relations to and from the old model before reloading, # as _meta.apps may change related_models = get_related_models_recursive(old_model) # Get all outgoing references from the model to be rendered model_state = self.models[(app_label, model_name)] # Directly related models are the models pointed to by ForeignKeys, # OneToOneFields, and ManyToManyFields. direct_related_models = set() for name, field in model_state.fields: if field.is_relation: if field.remote_field.model == RECURSIVE_RELATIONSHIP_CONSTANT: continue rel_app_label, rel_model_name = _get_app_label_and_model_name(field.related_model, app_label) direct_related_models.add((rel_app_label, rel_model_name.lower())) # For all direct related models recursively get all related models. related_models.update(direct_related_models) for rel_app_label, rel_model_name in direct_related_models: try: rel_model = self.apps.get_model(rel_app_label, rel_model_name) except LookupError: pass else: related_models.update(get_related_models_recursive(rel_model)) # Include the model itself related_models.add((app_label, model_name)) # Unregister all related models with self.apps.bulk_update(): for rel_app_label, rel_model_name in related_models: self.apps.unregister_model(rel_app_label, rel_model_name) states_to_be_rendered = [] # Gather all models states of those models that will be rerendered. # This includes: # 1. All related models of unmigrated apps for model_state in self.apps.real_models: if (model_state.app_label, model_state.name_lower) in related_models: states_to_be_rendered.append(model_state) # 2. 
All related models of migrated apps for rel_app_label, rel_model_name in related_models: try: model_state = self.models[rel_app_label, rel_model_name] except KeyError: pass else: states_to_be_rendered.append(model_state) # Render all models self.apps.render_multiple(states_to_be_rendered) def clone(self): "Returns an exact copy of this ProjectState" new_state = ProjectState( models={k: v.clone() for k, v in self.models.items()}, real_apps=self.real_apps, ) if 'apps' in self.__dict__: new_state.apps = self.apps.clone() return new_state @cached_property def apps(self): return StateApps(self.real_apps, self.models) @property def concrete_apps(self): self.apps = StateApps(self.real_apps, self.models, ignore_swappable=True) return self.apps @classmethod def from_apps(cls, apps): "Takes in an Apps and returns a ProjectState matching it" app_models = {} for model in apps.get_models(include_swapped=True): model_state = ModelState.from_model(model) app_models[(model_state.app_label, model_state.name_lower)] = model_state return cls(app_models) def __eq__(self, other): if set(self.models.keys()) != set(other.models.keys()): return False if set(self.real_apps) != set(other.real_apps): return False return all(model == other.models[key] for key, model in self.models.items()) def __ne__(self, other): return not (self == other) class AppConfigStub(AppConfig): """ Stubs a Django AppConfig. Only provides a label, and a dict of models. """ # Not used, but required by AppConfig.__init__ path = '' def __init__(self, label): self.label = label # App-label and app-name are not the same thing, so technically passing # in the label here is wrong. In practice, migrations don't care about # the app name, but we need something unique, and the label works fine. super(AppConfigStub, self).__init__(label, None) def import_models(self, all_models): self.models = all_models class StateApps(Apps): """ Subclass of the global Apps registry class to better handle dynamic model additions and removals. """ def __init__(self, real_apps, models, ignore_swappable=False): # Any apps in self.real_apps should have all their models included # in the render. We don't use the original model instances as there # are some variables that refer to the Apps object. # FKs/M2Ms from real apps are also not included as they just # mess things up with partial states (due to lack of dependencies) self.real_models = [] for app_label in real_apps: app = global_apps.get_app_config(app_label) for model in app.get_models(): self.real_models.append(ModelState.from_model(model, exclude_rels=True)) # Populate the app registry with a stub for each application. app_labels = {model_state.app_label for model_state in models.values()} app_configs = [AppConfigStub(label) for label in sorted(real_apps + list(app_labels))] super(StateApps, self).__init__(app_configs) self.render_multiple(list(models.values()) + self.real_models) # There shouldn't be any operations pending at this point. pending_models = set(self._pending_operations) if ignore_swappable: pending_models -= {make_model_tuple(settings.AUTH_USER_MODEL)} if pending_models: raise ValueError(self._pending_models_error(pending_models)) def _pending_models_error(self, pending_models): """ Almost all internal uses of lazy operations are to resolve string model references in related fields. We can extract the fields from those operations and use them to provide a nicer error message. This will work for any function passed to lazy_related_operation() that has a keyword argument called 'field'. 
""" def extract_field(operation): # operation is annotated with the field in # apps.registry.Apps.lazy_model_operation(). return getattr(operation, 'field', None) def extract_field_names(operations): return (str(field) for field in map(extract_field, operations) if field) get_ops = self._pending_operations.__getitem__ # Ordered list of pairs of the form # ((app_label, model_name), [field_name_1, field_name_2, ...]) models_fields = sorted( (model_key, sorted(extract_field_names(get_ops(model_key)))) for model_key in pending_models ) def model_text(model_key, fields): field_list = ", ".join(fields) field_text = " (referred to by fields: %s)" % field_list if fields else "" return ("%s.%s" % model_key) + field_text msg = "Unhandled pending operations for models:" return "\n ".join([msg] + [model_text(*i) for i in models_fields]) @contextmanager def bulk_update(self): # Avoid clearing each model's cache for each change. Instead, clear # all caches when we're finished updating the model instances. ready = self.ready self.ready = False try: yield finally: self.ready = ready self.clear_cache() def render_multiple(self, model_states): # We keep trying to render the models in a loop, ignoring invalid # base errors, until the size of the unrendered models doesn't # decrease by at least one, meaning there's a base dependency loop/ # missing base. if not model_states: return # Prevent that all model caches are expired for each render. with self.bulk_update(): unrendered_models = model_states while unrendered_models: new_unrendered_models = [] for model in unrendered_models: try: model.render(self) except InvalidBasesError: new_unrendered_models.append(model) if len(new_unrendered_models) == len(unrendered_models): raise InvalidBasesError( "Cannot resolve bases for %r\nThis can happen if you are inheriting models from an " "app with migrations (e.g. contrib.auth)\n in an app with no migrations; see " "https://docs.djangoproject.com/en/%s/topics/migrations/#dependencies " "for more" % (new_unrendered_models, get_docs_version()) ) unrendered_models = new_unrendered_models def clone(self): """ Return a clone of this registry, mainly used by the migration framework. """ clone = StateApps([], {}) clone.all_models = copy.deepcopy(self.all_models) clone.app_configs = copy.deepcopy(self.app_configs) # No need to actually clone them, they'll never change clone.real_models = self.real_models return clone def register_model(self, app_label, model): self.all_models[app_label][model._meta.model_name] = model if app_label not in self.app_configs: self.app_configs[app_label] = AppConfigStub(app_label) self.app_configs[app_label].models = OrderedDict() self.app_configs[app_label].models[model._meta.model_name] = model self.do_pending_operations(model) self.clear_cache() def unregister_model(self, app_label, model_name): try: del self.all_models[app_label][model_name] del self.app_configs[app_label].models[model_name] except KeyError: pass class ModelState(object): """ Represents a Django Model. We don't use the actual Model class as it's not designed to have its options changed - instead, we mutate this one and then render it into a Model as required. Note that while you are allowed to mutate .fields, you are not allowed to mutate the Field instances inside there themselves - you must instead assign new ones, as these are not detached during a clone. 
""" def __init__(self, app_label, name, fields, options=None, bases=None, managers=None): self.app_label = app_label self.name = force_text(name) self.fields = fields self.options = options or {} self.bases = bases or (models.Model, ) self.managers = managers or [] # Sanity-check that fields is NOT a dict. It must be ordered. if isinstance(self.fields, dict): raise ValueError("ModelState.fields cannot be a dict - it must be a list of 2-tuples.") for name, field in fields: # Sanity-check that fields are NOT already bound to a model. if hasattr(field, 'model'): raise ValueError( 'ModelState.fields cannot be bound to a model - "%s" is.' % name ) # Sanity-check that relation fields are NOT referring to a model class. if field.is_relation and hasattr(field.related_model, '_meta'): raise ValueError( 'ModelState.fields cannot refer to a model class - "%s.to" does. ' 'Use a string reference instead.' % name ) if field.many_to_many and hasattr(field.remote_field.through, '_meta'): raise ValueError( 'ModelState.fields cannot refer to a model class - "%s.through" does. ' 'Use a string reference instead.' % name ) @cached_property def name_lower(self): return self.name.lower() @classmethod def from_model(cls, model, exclude_rels=False): """ Feed me a model, get a ModelState representing it out. """ # Deconstruct the fields fields = [] for field in model._meta.local_fields: if getattr(field, "remote_field", None) and exclude_rels: continue if isinstance(field, OrderWrt): continue name = force_text(field.name, strings_only=True) try: fields.append((name, field.clone())) except TypeError as e: raise TypeError("Couldn't reconstruct field %s on %s: %s" % ( name, model._meta.label, e, )) if not exclude_rels: for field in model._meta.local_many_to_many: name = force_text(field.name, strings_only=True) try: fields.append((name, field.clone())) except TypeError as e: raise TypeError("Couldn't reconstruct m2m field %s on %s: %s" % ( name, model._meta.object_name, e, )) # Extract the options options = {} for name in DEFAULT_NAMES: # Ignore some special options if name in ["apps", "app_label"]: continue elif name in model._meta.original_attrs: if name == "unique_together": ut = model._meta.original_attrs["unique_together"] options[name] = set(normalize_together(ut)) elif name == "index_together": it = model._meta.original_attrs["index_together"] options[name] = set(normalize_together(it)) else: options[name] = model._meta.original_attrs[name] # Force-convert all options to text_type (#23226) options = cls.force_text_recursive(options) # If we're ignoring relationships, remove all field-listing model # options (that option basically just means "make a stub model") if exclude_rels: for key in ["unique_together", "index_together", "order_with_respect_to"]: if key in options: del options[key] def flatten_bases(model): bases = [] for base in model.__bases__: if hasattr(base, "_meta") and base._meta.abstract: bases.extend(flatten_bases(base)) else: bases.append(base) return bases # We can't rely on __mro__ directly because we only want to flatten # abstract models and not the whole tree. However by recursing on # __bases__ we may end up with duplicates and ordering issues, we # therefore discard any duplicates and reorder the bases according # to their index in the MRO. 
flattened_bases = sorted(set(flatten_bases(model)), key=lambda x: model.__mro__.index(x)) # Make our record bases = tuple( ( base._meta.label_lower if hasattr(base, "_meta") else base ) for base in flattened_bases ) # Ensure at least one base inherits from models.Model if not any((isinstance(base, six.string_types) or issubclass(base, models.Model)) for base in bases): bases = (models.Model,) # Constructs all managers on the model managers_mapping = {} def reconstruct_manager(mgr): as_manager, manager_path, qs_path, args, kwargs = mgr.deconstruct() if as_manager: qs_class = import_string(qs_path) instance = qs_class.as_manager() else: manager_class = import_string(manager_path) instance = manager_class(*args, **kwargs) # We rely on the ordering of the creation_counter of the original # instance name = force_text(mgr.name) managers_mapping[name] = (mgr.creation_counter, instance) if hasattr(model, "_default_manager"): default_manager_name = force_text(model._default_manager.name) # Make sure the default manager is always the first if model._default_manager.use_in_migrations: reconstruct_manager(model._default_manager) else: # Force this manager to be the first and thus default managers_mapping[default_manager_name] = (0, models.Manager()) # Sort all managers by their creation counter for _, manager, _ in sorted(model._meta.managers): if manager.name == "_base_manager" or not manager.use_in_migrations: continue reconstruct_manager(manager) # Sort all managers by their creation counter but take only name and # instance for further processing managers = [ (name, instance) for name, (cc, instance) in sorted(managers_mapping.items(), key=lambda v: v[1]) ] # If the only manager on the model is the default manager defined # by Django (`objects = models.Manager()`), this manager will not # be added to the model state. 
if managers == [('objects', models.Manager())]: managers = [] else: managers = [] # Construct the new ModelState return cls( model._meta.app_label, model._meta.object_name, fields, options, bases, managers, ) @classmethod def force_text_recursive(cls, value): if isinstance(value, six.string_types): return smart_text(value) elif isinstance(value, list): return [cls.force_text_recursive(x) for x in value] elif isinstance(value, tuple): return tuple(cls.force_text_recursive(x) for x in value) elif isinstance(value, set): return set(cls.force_text_recursive(x) for x in value) elif isinstance(value, dict): return { cls.force_text_recursive(k): cls.force_text_recursive(v) for k, v in value.items() } return value def construct_managers(self): "Deep-clone the managers using deconstruction" # Sort all managers by their creation counter sorted_managers = sorted(self.managers, key=lambda v: v[1].creation_counter) for mgr_name, manager in sorted_managers: mgr_name = force_text(mgr_name) as_manager, manager_path, qs_path, args, kwargs = manager.deconstruct() if as_manager: qs_class = import_string(qs_path) yield mgr_name, qs_class.as_manager() else: manager_class = import_string(manager_path) yield mgr_name, manager_class(*args, **kwargs) def clone(self): "Returns an exact copy of this ModelState" return self.__class__( app_label=self.app_label, name=self.name, fields=list(self.fields), options=dict(self.options), bases=self.bases, managers=list(self.managers), ) def render(self, apps): "Creates a Model object from our current state into the given apps" # First, make a Meta object meta_contents = {'app_label': self.app_label, "apps": apps} meta_contents.update(self.options) meta = type(str("Meta"), tuple(), meta_contents) # Then, work out our bases try: bases = tuple( (apps.get_model(base) if isinstance(base, six.string_types) else base) for base in self.bases ) except LookupError: raise InvalidBasesError("Cannot resolve one or more bases from %r" % (self.bases,)) # Turn fields into a dict for the body, add other bits body = {name: field.clone() for name, field in self.fields} body['Meta'] = meta body['__module__'] = "__fake__" # Restore managers body.update(self.construct_managers()) # Then, make a Model object (apps.register_model is called in __new__) return type( str(self.name), bases, body, ) def get_field_by_name(self, name): for fname, field in self.fields: if fname == name: return field raise ValueError("No field called %s on model %s" % (name, self.name)) def __repr__(self): return "<ModelState: '%s.%s'>" % (self.app_label, self.name) def __eq__(self, other): return ( (self.app_label == other.app_label) and (self.name == other.name) and (len(self.fields) == len(other.fields)) and all((k1 == k2 and (f1.deconstruct()[1:] == f2.deconstruct()[1:])) for (k1, f1), (k2, f2) in zip(self.fields, other.fields)) and (self.options == other.options) and (self.bases == other.bases) and (self.managers == other.managers) ) def __ne__(self, other): return not (self == other)
mit
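get_related_models_recursive() above is a breadth-first walk that tolerates cycles by tracking already-seen nodes, and it relies on Python list iteration picking up items appended during the loop. The same pattern on a plain adjacency dict, as a sketch:

def related_recursive(graph, start):
    seen = set()
    queue = list(graph.get(start, []))
    for node in queue:  # the queue grows while we iterate over it
        if node in seen:
            continue
        seen.add(node)
        queue.extend(graph.get(node, []))
    # the start node itself is excluded, as in the Django helper
    return seen - {start}

graph = {'author': ['book'], 'book': ['publisher', 'author'], 'publisher': []}
print(related_recursive(graph, 'author'))  # {'book', 'publisher'}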
dumbbell/virt-manager
src/virtManager/remote.py
3
2157
#
# Copyright (C) 2006 Red Hat, Inc.
# Copyright (C) 2006 Daniel P. Berrange <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#

import dbus.service


class vmmRemote(dbus.service.Object):
    def __init__(self, engine, bus_name, object_path="/com/redhat/virt/manager"):
        dbus.service.Object.__init__(self, bus_name, object_path)

        self.engine = engine

    @dbus.service.method("com.redhat.virt.manager", in_signature="s")
    def show_domain_creator(self, uri):
        self.engine.show_domain_creator(str(uri))

    @dbus.service.method("com.redhat.virt.manager", in_signature="ss")
    def show_domain_editor(self, uri, uuid):
        self.engine.show_domain_editor(str(uri), str(uuid))

    @dbus.service.method("com.redhat.virt.manager", in_signature="ss")
    def show_domain_performance(self, uri, uuid):
        self.engine.show_domain_performance(str(uri), str(uuid))

    @dbus.service.method("com.redhat.virt.manager", in_signature="ss")
    def show_domain_console(self, uri, uuid):
        self.engine.show_domain_console(str(uri), str(uuid))

    @dbus.service.method("com.redhat.virt.manager", in_signature="s")
    def show_host_summary(self, uri):
        self.engine.show_host_summary(str(uri))

    @dbus.service.method("com.redhat.virt.manager", in_signature="")
    def show_manager(self):
        self.engine.show_manager()

    @dbus.service.method("com.redhat.virt.manager")
    def show_connect(self):
        self.engine.show_connect()
gpl-2.0
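For context, a hypothetical client-side call into the vmmRemote service above might look as follows; the bus name 'com.redhat.virt.manager' is an assumption (only the interface name and object path appear in the code, and the actual bus name is supplied by the caller of __init__):

import dbus

bus = dbus.SessionBus()
# assumed bus name; the object path matches the default in vmmRemote.__init__
obj = bus.get_object('com.redhat.virt.manager',
                     '/com/redhat/virt/manager')
iface = dbus.Interface(obj, dbus_interface='com.redhat.virt.manager')
iface.show_manager()                       # method with empty in_signature
iface.show_host_summary('qemu:///system')  # method with in_signature='s'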
emilopez/pydem
pydem/examples/cross-tile_process_manager_test.py
3
8813
# -*- coding: utf-8 -*- """ Copyright 2015 Creare Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ if __name__ == "__main__": import numpy as np import os from pydem.processing_manager import ProcessManager from pydem.test_pydem import make_test_files, mk_test_multifile #%% Make the test case files NN = [300, 400, 660, 740] test_num = 32 testdir = 'testtiff' make_test_files(NN, testnum=test_num, testdir=testdir, plotflag=False) mk_test_multifile(test_num, NN, testdir, nx_grid=3, ny_grid=4, nx_overlap=16, ny_overlap=32) path = r'testtiff\chunks' # Remove a couple of these files so that we only have 4 tiles, and we # know where they should drain to files = os.listdir(path) files.sort() for i, fil in enumerate(files): print i, fil delete_ids = [0, 1, 2, 3, 4, 5, 6, 9] for d_id in delete_ids: os.remove(os.path.join(path, files[d_id])) # Create the ProcessManager object savepath = r'testtiff\processed_data' pm = ProcessManager(path, savepath) pm._DEBUG = True # Save out the magnitude and slope pm.elev_source_files.sort() esfile = pm.elev_source_files[1] # Start with lower-left tile and go CCW # Start twi calculation for first tile fn, status = pm.calculate_twi(esfile, save_path=pm.save_path, do_edges=False) edge_init_data, edge_init_done, edge_init_todo = \ pm.tile_edge.get_edge_init_data(esfile) # THe only valuable information here is the edge_init_todo, which is self-set # In this case the right edge of the tile is the edge that needs, # information, so the right todo should be True np.testing.assert_(np.all(edge_init_todo['right'][1:-1])) #don't look at corners np.testing.assert_(np.all(~edge_init_todo['left'][1:-1])) #don't look at corners # Next we check that the right and top neighbors are correctly set also top = pm.tile_edge.neighbors[esfile]['top'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(top) np.testing.assert_(np.all(~edge_init_done['bottom'][1:-1])) #don't look at corners # stop right = pm.tile_edge.neighbors[esfile]['right'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(right) np.testing.assert_(np.all(~edge_init_done['left'][1:-1])) #don't look at corners topright = pm.tile_edge.neighbors[esfile]['top-right'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(topright) np.testing.assert_(np.all(~edge_init_done['left'][1:-1])) #don't look at corners np.testing.assert_(np.all(~edge_init_done['bottom'][1:-1])) #don't look at corners # pm.tile_edge.visualize_neighbors() # do the next tile esfile = pm.elev_source_files[0] fn, status = pm.calculate_twi(esfile, save_path=pm.save_path, do_edges=False) edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(esfile) np.testing.assert_(np.all(~edge_init_todo['right'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['left'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['top'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['bottom'][1:-1])) # Next we check that the left and top neighbors are correctly set also top = 
pm.tile_edge.neighbors[esfile]['top'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(top) np.testing.assert_(np.all(edge_init_done['bottom'])) left = pm.tile_edge.neighbors[esfile]['left'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(left) np.testing.assert_(np.all(edge_init_done['right'])) topleft = pm.tile_edge.neighbors[esfile]['top-left'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(topleft) np.testing.assert_(np.any(edge_init_done['right'])) np.testing.assert_(np.any(edge_init_done['bottom'])) # pm.tile_edge.visualize_neighbors() # Do the third tile esfile = pm.elev_source_files[2] fn, status = pm.calculate_twi(esfile, save_path=pm.save_path, do_edges=False) edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(esfile) np.testing.assert_(np.all(~edge_init_todo['right'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['left'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['top'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['bottom'][1:-1])) # Next we check that the left and top neighbors are correctly set also left = pm.tile_edge.neighbors[esfile]['left'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(left) np.testing.assert_(np.all(edge_init_done['right'])) bottomleft = pm.tile_edge.neighbors[esfile]['bottom-left'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(bottomleft) np.testing.assert_(np.any(edge_init_done['right'])) np.testing.assert_(np.any(edge_init_done['top'])) # pm.tile_edge.visualize_neighbors() # a1 = pm.dem_proc.uca.copy() # esfile = pm.elev_source_files[2] # coords1 = parse_fn(esfile) # imshow(a1, interpolation='none', # extent=[coords1[1], coords1[3], coords1[0], coords1[2]]);clim(0, a1.max()) # crds = pm.tile_edge.edges[left]['right'].get_coordinates() # edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(left) # imshow(edge_init_data['right'][:, None], interpolation='none', # extent=[crds[:, 1].min(), crds[:, 1].max()+0.3 / a1.shape[0], # crds[:, 0].min(), crds[:, 0].max()]);clim(0, a1.max()) # xlim(coords1[1], coords1[3]) # ylim(coords1[0], coords1[2]) #%%Do the final tile to complete the first round (non-edge resolving) esfile = pm.elev_source_files[3] fn, status = pm.calculate_twi(esfile, save_path=pm.save_path, do_edges=False) edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(esfile) np.testing.assert_(np.all(~edge_init_todo['right'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['left'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['top'][1:-1])) np.testing.assert_(np.any(~edge_init_todo['bottom'][1:-1])) # mixed on bottom np.testing.assert_(np.any(edge_init_todo['bottom'][1:-1])) # mixed on bottom # This one has no neighbors to check (no downstream dependencies) # a2 = pm.dem_proc.uca.copy() # esfile = pm.elev_source_files[3] # coords = parse_fn(esfile) # imshow(a2, extent=[coords[1], coords[3], coords[0], coords[2]], # interpolation='none');clim(0, a1.max()) # xlim(coords[1], coords1[3]) # Now let us start the edge resolution round. 
There are only 2 tiles that # require edge resolution # %% i = pm.tile_edge.find_best_candidate(pm.elev_source_files) np.testing.assert_(i==1) # should be the first tile esfile = pm.elev_source_files[i] fn, status = pm.calculate_twi(esfile, save_path=pm.save_path, do_edges=True) edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(esfile) np.testing.assert_(np.all(~edge_init_todo['right'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['left'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['top'][1:-1])) np.testing.assert_(np.all(~edge_init_todo['bottom'][1:-1])) # check neihbors top = pm.tile_edge.neighbors[esfile]['top'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(top) np.testing.assert_(np.all(edge_init_done['bottom'][1:-1])) #don't look at corners right = pm.tile_edge.neighbors[esfile]['right'] edge_init_data, edge_init_done, edge_init_todo = pm.tile_edge.get_edge_init_data(right) np.testing.assert_(np.all(edge_init_done['left'][1:-1])) #don't look at corners i = pm.tile_edge.find_best_candidate(pm.elev_source_files) np.testing.assert_(i==3) # should be the last tile esfile = pm.elev_source_files[i] fn, status = pm.calculate_twi(esfile, save_path=pm.save_path, do_edges=True)
apache-2.0
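The tests above repeatedly assert on boolean edge vectors while slicing off the two corner entries with [1:-1]. A tiny self-contained numpy illustration of that idiom (hypothetical data, not pydem's real edge structures):

import numpy as np

edge_init_todo = {'right': np.array([False, True, True, True, False]),
                  'left':  np.zeros(5, dtype=bool)}

# interior of the right edge must still need information; corners are ignored
np.testing.assert_(np.all(edge_init_todo['right'][1:-1]))
# interior of the left edge must be clear
np.testing.assert_(np.all(~edge_init_todo['left'][1:-1]))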
CLVsol/oehealth
oehealth_dispensation/oehealth_dispensation.py
1
9325
# -*- encoding: utf-8 -*- ################################################################################ # # # Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ################################################################################ from osv import osv from osv import fields import time class oehealth_dispensation(osv.Model): _name='oehealth.dispensation' def _compute_create_uid(self, cr, uid, ids, field_name, arg, context={}): result = {} for r in self.browse(cr, uid, ids, context=context): perms = self.perm_read(cr, uid, ids) create_uid = perms[0].get('create_uid', 'n/a') result[r.id] = create_uid return result def _compute_create_date(self, cr, uid, ids, field_name, arg, context={}): result = {} for r in self.browse(cr, uid, ids, context=context): perms = self.perm_read(cr, uid, ids) create_date = perms[0].get('create_date', 'n/a') result[r.id] = create_date return result def _compute_write_uid(self, cr, uid, ids, field_name, arg, context={}): result = {} for r in self.browse(cr, uid, ids, context=context): perms = self.perm_read(cr, uid, ids) write_uid = perms[0].get('write_uid', 'n/a') result[r.id] = write_uid return result def _compute_write_date(self, cr, uid, ids, field_name, arg, context={}): result = {} for r in self.browse(cr, uid, ids, context=context): perms = self.perm_read(cr, uid, ids) write_date = perms[0].get('write_date', 'n/a') result[r.id] = write_date return result _columns={ 'name': fields.char(size=256, string='Dispensation ID', required=True, help='Type in the ID of this dispensation'), 'dispensation_date': fields.date(string='Dispensation Date', required=True), 'prescription_date': fields.date(string='Prescription Date', required=True), 'prescriber_id': fields.many2one('oehealth.prescriber', string='Prescriber', required=True), #'patient_id': fields.many2one('oehealth.patient', string='Patient', required=True), #'pregnancy_warning': fields.boolean(string='Pregancy Warning', readonly=True), 'notes': fields.text(string='Prescription Notes'), #'prescription_line': fields.one2many('oehealth.dispensation.line', # 'pbm_prescription_order_id', # string='Dispensation line',), 'prescription_line': fields.one2many('oehealth.medicament.template', 'dispensation_id', string='Prescription lines',), #'pbm_prescription_warning_ack': fields.boolean(string='Dispensation verified'), #'user_id': fields.many2one('res.users', string='Prescribing Doctor', required=True), 'active': fields.boolean('Active', help="The active field allows you to hide the dispensation without removing it."), 'state': fields.selection([('new','New'), ('revised','Revised'), ('waiting','Waiting'), ('okay','Okay')], 'Stage', readonly=True), 'create_uid': fields.function(_compute_create_uid, method=True, type='char', string='Create User',), 'create_date': fields.function(_compute_create_date, method=True, type='datetime', 
string='Create Date',), 'write_uid': fields.function(_compute_write_uid, method=True, type='char', string='Write User',), 'write_date': fields.function(_compute_write_date, method=True, type='datetime', string='Write Date',), } _sql_constraints = [ ('uniq_name', 'unique(name)', "The Dispensation ID must be unique!"), ] _defaults={ 'name': '/', 'dispensation_date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), 'active': 1, 'state': 'new', } def create(self, cr, uid, vals, context=None): if context is None: context = {} if not 'name' in vals or vals['name'] == '/': val = self.pool.get('ir.sequence').get(cr, uid, 'oehealth.dispensation.code') code = map(int, str(val)) code_len = len(code) while len(code) < 14: code.insert(0, 0) while len(code) < 16: n = sum([(len(code) + 1 - i) * v for i, v in enumerate(code)]) % 11 if n > 1: f = 11 - n else: f = 0 code.append(f) code_str = "%s.%s.%s.%s.%s-%s" % (str(code[0]) + str(code[1]), str(code[2]) + str(code[3]) + str(code[4]), str(code[5]) + str(code[6]) + str(code[7]), str(code[8]) + str(code[9]) + str(code[10]), str(code[11]) + str(code[12]) + str(code[13]), str(code[14]) + str(code[15])) if code_len <= 3: vals['name'] = code_str[18 - code_len:21] elif code_len > 3 and code_len <= 6: vals['name'] = code_str[17 - code_len:21] elif code_len > 6 and code_len <= 9: vals['name'] = code_str[16 - code_len:21] elif code_len > 9 and code_len <= 12: vals['name'] = code_str[15 - code_len:21] elif code_len > 12 and code_len <= 14: vals['name'] = code_str[14 - code_len:21] return super(oehealth_dispensation, self).create(cr, uid, vals, context) def oehealth_dispensation_new(self, cr, uid, ids): self.write(cr, uid, ids, {'state': 'new'}) return True def oehealth_dispensation_revised(self, cr, uid, ids): self.write(cr, uid, ids, {'state': 'revised'}) return True def oehealth_dispensation_waiting(self, cr, uid, ids): self.write(cr, uid, ids, {'state': 'waiting'}) return True def oehealth_dispensation_okay(self, cr, uid, ids): self.write(cr, uid, ids, {'state': 'okay'}) return True def get_authorization(self, cr, uid, ids, context={}): data=ids test_request_obj = self.pool.get('oehealth.dispensation') lab_obj = self.pool.get('oehealth.dispensation') test_report_data={} test_cases = [] test_obj = test_request_obj.browse(cr, uid, context.get('active_id'), context=context) #if test_obj.state == 'tested': if test_obj.state != 'tested': #raise osv.except_osv(_('UserError'),_('Test Report already created.')) raise osv.except_osv(('UserError'),('Test Report already created.')) test_report_data['test'] = test_obj.name.id test_report_data['patient'] = test_obj.patient_id.id #test_report_data['requestor'] = test_obj.doctor_id.id test_report_data['date_requested'] = test_obj.date for criterion in test_obj.name.criteria: test_cases.append((0,0,{'name':criterion.name, 'sequence':criterion.sequence, 'normal_range':criterion.normal_range, 'unit':criterion.unit.id, })) test_report_data['criteria'] = test_cases lab_id = lab_obj.create(cr,uid,test_report_data,context=context) test_request_obj.write(cr, uid, context.get('active_id'), {'state':'tested'}) return { 'domain': "[('id','=', "+str(lab_id)+")]", 'name': 'Lab Test Report', 'view_type': 'form', 'view_mode': 'tree,form', 'res_model': 'oehealth.lab_test', 'type': 'ir.actions.act_window' } oehealth_dispensation()
agpl-3.0
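The create() override in this record derives the record name by left-padding the sequence number to 14 digits and appending two mod-11 check digits. That digit-generation step, extracted into a standalone sketch (hypothetical helper name, Python 3 list syntax instead of the module's Python 2 map()):

def add_check_digits(value, base_len=14, total_len=16):
    # Same weighting as create() above: (len(code) + 1 - i), modulo 11.
    code = [int(c) for c in str(value)]
    while len(code) < base_len:
        code.insert(0, 0)          # zero-pad on the left
    while len(code) < total_len:
        n = sum((len(code) + 1 - i) * v for i, v in enumerate(code)) % 11
        code.append(11 - n if n > 1 else 0)
    return code

print(add_check_digits(123))  # 14 padded digits plus 2 check digits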
HonzaKral/django
django/conf/locale/nn/formats.py
504
1810
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals

# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
    '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y',  # '2006-10-25', '25.10.2006', '25.10.06'
    # '%d. %b %Y', '%d %b %Y',           # '25. okt 2006', '25 okt 2006'
    # '%d. %b. %Y', '%d %b. %Y',         # '25. okt. 2006', '25 okt. 2006'
    # '%d. %B %Y', '%d %B %Y',           # '25. oktober 2006', '25 oktober 2006'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%d.%m.%Y %H:%M:%S',     # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',  # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',        # '25.10.2006 14:30'
    '%d.%m.%Y',              # '25.10.2006'
    '%d.%m.%y %H:%M:%S',     # '25.10.06 14:30:59'
    '%d.%m.%y %H:%M:%S.%f',  # '25.10.06 14:30:59.000200'
    '%d.%m.%y %H:%M',        # '25.10.06 14:30'
    '%d.%m.%y',              # '25.10.06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0'  # non-breaking space
NUMBER_GROUPING = 3
bsd-3-clause
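Outside Django, the strftime-style *_INPUT_FORMATS above can be exercised directly with datetime.strptime; a minimal sketch of the try-each-format loop that Django's form fields perform:

from datetime import datetime

DATE_INPUT_FORMATS = ['%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y']

def parse_date(value, formats=DATE_INPUT_FORMATS):
    # Try each accepted format in order, returning the first match.
    for fmt in formats:
        try:
            return datetime.strptime(value, fmt).date()
        except ValueError:
            continue
    raise ValueError('no format matched %r' % value)

print(parse_date('25.10.2006'))  # 2006-10-25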
shadowk29/cusumtools
legacy/minimal_psd.py
1
12009
## COPYRIGHT ## Copyright (C) 2015 Kyle Briggs (kbrig035<at>uottawa.ca) ## ## This file is part of cusumtools. ## ## This program is free software: you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation, either version 3 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program. If not, see <http://www.gnu.org/licenses/>. import matplotlib matplotlib.use('TkAgg') import numpy as np import tkinter.filedialog import tkinter as tk from matplotlib.figure import Figure from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg import scipy.io as sio from scipy.signal import bessel, filtfilt, welch from scikits.samplerate import resample import pylab as pl import glob import os import time import pandas as pd from pandasql import sqldf import re def make_format(current, other): # current and other are axes def format_coord(x, y): # x, y are data coordinates # convert to display coords display_coord = current.transData.transform((x,y)) inv = other.transData.inverted() # convert back to data coords with respect to ax ax_coord = inv.transform(display_coord) coords = [ax_coord, (x, y)] return ('Left: {:<40} Right: {:<}' .format(*['({:.3f}, {:.3f})'.format(x, y) for x,y in coords])) return format_coord class App(tk.Frame): def __init__(self, parent,file_path): tk.Frame.__init__(self, parent) parent.deiconify() self.events_flag = False self.baseline_flag = False self.file_path = file_path ##### Trace plotting widgets ##### self.trace_frame = tk.LabelFrame(parent,text='Current Trace') self.trace_fig = Figure(figsize=(7,5), dpi=100) self.trace_canvas = FigureCanvasTkAgg(self.trace_fig, master=self.trace_frame) self.trace_toolbar_frame = tk.Frame(self.trace_frame) self.trace_toolbar = NavigationToolbar2TkAgg(self.trace_canvas, self.trace_toolbar_frame) self.trace_toolbar.update() self.trace_frame.grid(row=0,column=0,columnspan=6,sticky=tk.N+tk.S) self.trace_toolbar_frame.grid(row=1,column=0,columnspan=6) self.trace_canvas.get_tk_widget().grid(row=0,column=0,columnspan=6) ##### PSD plotting widgets ##### self.psd_frame = tk.LabelFrame(parent,text='Power Spectrum') self.psd_fig = Figure(figsize=(7,5), dpi=100) self.psd_canvas = FigureCanvasTkAgg(self.psd_fig, master=self.psd_frame) self.psd_toolbar_frame = tk.Frame(self.psd_frame) self.psd_toolbar = NavigationToolbar2TkAgg(self.psd_canvas, self.psd_toolbar_frame) self.psd_toolbar.update() self.psd_frame.grid(row=0,column=6,columnspan=6,sticky=tk.N+tk.S) self.psd_toolbar_frame.grid(row=1,column=6,columnspan=6) self.psd_canvas.get_tk_widget().grid(row=0,column=6,columnspan=6) ##### Control widgets ##### self.control_frame = tk.LabelFrame(parent, text='Controls') self.control_frame.grid(row=2,column=0,columnspan=6,sticky=tk.N+tk.S+tk.E+tk.W) self.start_entry = tk.Entry(self.control_frame) self.start_entry.insert(0,'0') self.start_label = tk.Label(self.control_frame, text='Start Time (s)') self.start_label.grid(row=0,column=0,sticky=tk.E+tk.W) self.start_entry.grid(row=0,column=1,sticky=tk.E+tk.W) self.end_entry = tk.Entry(self.control_frame) self.end_entry.insert(0,'10') self.end_label = 
tk.Label(self.control_frame, text='End Time (s)')
        self.end_label.grid(row=0,column=2,sticky=tk.E+tk.W)
        self.end_entry.grid(row=0,column=3,sticky=tk.E+tk.W)

        self.cutoff_entry = tk.Entry(self.control_frame)
        self.cutoff_entry.insert(0,'')
        self.cutoff_label = tk.Label(self.control_frame, text='Cutoff (Hz)')
        self.cutoff_label.grid(row=1,column=0,sticky=tk.E+tk.W)
        self.cutoff_entry.grid(row=1,column=1,sticky=tk.E+tk.W)

        self.order_entry = tk.Entry(self.control_frame)
        self.order_entry.insert(0,'')
        self.order_label = tk.Label(self.control_frame, text='Filter Order')
        self.order_label.grid(row=1,column=2,sticky=tk.E+tk.W)
        self.order_entry.grid(row=1,column=3,sticky=tk.E+tk.W)

        self.samplerate_entry = tk.Entry(self.control_frame)
        self.samplerate_entry.insert(0,'250000')
        self.samplerate_label = tk.Label(self.control_frame, text='Sampling Frequency (Hz)')
        self.samplerate_label.grid(row=1,column=4,sticky=tk.E+tk.W)
        self.samplerate_entry.grid(row=1,column=5,sticky=tk.E+tk.W)

        self.savegain_entry = tk.Entry(self.control_frame)
        self.savegain_entry.insert(0,'1')
        self.savegain_label = tk.Label(self.control_frame, text='Save Gain')
        self.savegain_label.grid(row=0,column=4,sticky=tk.E+tk.W)
        self.savegain_entry.grid(row=0,column=5,sticky=tk.E+tk.W)

        self.plot_trace = tk.Button(self.control_frame, text='Update Trace', command=self.update_trace)
        self.plot_trace.grid(row=2,column=0,columnspan=2,sticky=tk.E+tk.W)

        self.normalize = tk.IntVar()
        self.normalize.set(0)
        self.normalize_check = tk.Checkbutton(self.control_frame, text='Normalize', variable = self.normalize)
        self.normalize_check.grid(row=2,column=2,sticky=tk.E+tk.W)

        self.plot_psd = tk.Button(self.control_frame, text='Update PSD', command=self.update_psd)
        self.plot_psd.grid(row=2,column=3,sticky=tk.E+tk.W)

        ##### Feedback Widgets #####
        self.feedback_frame = tk.LabelFrame(parent, text='Status')
        self.feedback_frame.grid(row=2,column=6,columnspan=6,sticky=tk.N+tk.S+tk.E+tk.W)

        self.export_psd = tk.Button(self.feedback_frame, text='Export PSD',command=self.export_psd)
        self.export_psd.grid(row=1,column=0,columnspan=6,sticky=tk.E+tk.W)

        self.export_trace = tk.Button(self.feedback_frame, text='Export Trace',command=self.export_trace)
        self.export_trace.grid(row=2,column=0,columnspan=6,sticky=tk.E+tk.W)

        self.load_memmap()
        self.initialize_samplerate()

    def export_psd(self):
        try:
            data_path = tkinter.filedialog.asksaveasfilename(defaultextension='.csv',initialdir=r'G:\PSDs for Sam')
            np.savetxt(data_path,np.c_[self.f, self.Pxx, self.rms],delimiter=',')
        except AttributeError:
            self.wildcard.set('Plot the PSD first')

    def export_trace(self):
        try:
            data_path = tkinter.filedialog.asksaveasfilename(defaultextension='.csv',initialdir=r'G:\Analysis\Pores\NPN\PSDs')
            np.savetxt(data_path,self.plot_data,delimiter=',')
        except AttributeError:
            self.wildcard.set('Plot the trace first')

    def load_mapped_data(self):
        self.total_samples = len(self.map)
        self.samplerate = int(self.samplerate_entry.get())

        if self.start_entry.get()!='':
            self.start_time = float(self.start_entry.get())
            start_index = int((float(self.start_entry.get())*self.samplerate))
        else:
            self.start_time = 0
            start_index = 0

        if self.end_entry.get()!='':
            self.end_time = float(self.end_entry.get())
            end_index = int((float(self.end_entry.get())*self.samplerate))
            if end_index > self.total_samples:
                end_index = self.total_samples
        self.data = self.map[start_index:end_index]
        self.data = float(self.savegain_entry.get()) * self.data

    def load_memmap(self):
        columntypes = np.dtype([('current', '>i2'), ('voltage', '>i2')])
        self.map =
np.memmap(self.file_path, dtype=columntypes, mode='r')['current'] def integrate_noise(self, f, Pxx): df = f[1]-f[0] return np.sqrt(np.cumsum(Pxx * df)) def filter_data(self): cutoff = float(self.cutoff_entry.get()) order = int(self.order_entry.get()) Wn = 2.0 * cutoff/float(self.samplerate) b, a = bessel(order,Wn,'low') padding = 1000 padded = np.pad(self.data, pad_width=padding, mode='median') self.filtered_data = filtfilt(b, a, padded, padtype=None)[padding:-padding] def initialize_samplerate(self): self.samplerate = float(self.samplerate_entry.get()) ##### Plot Updating functions ##### def update_trace(self): self.initialize_samplerate() self.load_mapped_data() self.filtered_data = self.data self.plot_data = self.filtered_data plot_samplerate = self.samplerate if self.cutoff_entry.get()!='' and self.order_entry!='': self.filter_data() self.plot_data = self.filtered_data self.trace_fig.clf() a = self.trace_fig.add_subplot(111) time = np.linspace(1.0/self.samplerate,len(self.plot_data)/float(self.samplerate),len(self.plot_data))+self.start_time a.set_xlabel(r'Time ($\mu s$)') a.set_ylabel('Current (pA)') self.trace_fig.subplots_adjust(bottom=0.14,left=0.21) a.plot(time*1e6,self.plot_data,'.',markersize=1) self.trace_canvas.show() def update_psd(self): self.initialize_samplerate() self.load_mapped_data() self.filtered_data = self.data self.plot_data = self.filtered_data plot_samplerate = self.samplerate if self.cutoff_entry.get()!='' and self.order_entry!='': self.filter_data() self.plot_data = self.filtered_data maxf = 2*float(self.cutoff_entry.get()) else: maxf = 2*float(self.samplerate_entry.get()) length = np.minimum(2**18,len(self.filtered_data)) end_index = int(np.floor(len(self.filtered_data)/length)*length) current = np.average(self.filtered_data[:end_index]) f, Pxx = welch(self.filtered_data, plot_samplerate,nperseg=length) self.rms = self.integrate_noise(f, Pxx) if self.normalize.get(): Pxx /= current**2 Pxx *= maxf/2.0 self.rms /= np.absolute(current) self.f = f self.Pxx = Pxx minf = 1 BW_index = np.searchsorted(f, maxf/2) logPxx = np.log10(Pxx[1:BW_index]) minP = 10**np.floor(np.amin(logPxx)) maxP = 10**np.ceil(np.amax(logPxx)) self.psd_fig.clf() a = self.psd_fig.add_subplot(111) a.set_xlabel('Frequency (Hz)') a.set_ylabel(r'Spectral Power ($\mathrm{pA}^2/\mathrm{Hz}$)') a.set_xlim(minf, maxf) a.set_ylim(minP, maxP) self.psd_fig.subplots_adjust(bottom=0.14,left=0.21) a.loglog(f[1:],Pxx[1:],'b-') for tick in a.get_yticklabels(): tick.set_color('b') a2 = a.twinx() a2.semilogx(f, self.rms, 'r-') a2.set_ylabel('RMS Noise (pA)') a2.set_xlim(minf, maxf) for tick in a2.get_yticklabels(): tick.set_color('r') a2.format_coord = make_format(a2, a) self.psd_canvas.show() def main(): root=tk.Tk() root.withdraw() file_path = tkinter.filedialog.askopenfilename(initialdir='C:/Data/') App(root,file_path).grid(row=0,column=0) root.mainloop() if __name__=="__main__": main()
gpl-3.0
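The numeric core of update_psd() and integrate_noise() in this record is independent of the Tkinter plumbing; a sketch with synthetic data (the sampling rate and trace here are placeholders):

import numpy as np
from scipy.signal import welch

fs = 250000.0                    # sampling rate in Hz (assumed)
data = np.random.randn(2 ** 18)  # synthetic current trace

nperseg = min(2 ** 18, len(data))
f, Pxx = welch(data, fs, nperseg=nperseg)

# RMS noise as the square root of the cumulatively integrated power,
# matching integrate_noise() above.
df = f[1] - f[0]
rms = np.sqrt(np.cumsum(Pxx * df))
print(rms[-1])  # total RMS noise over the full bandwidth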
syhpoon/xyzcmd
libxyz/vfs/vfsobj.py
1
8497
#-*- coding: utf8 -* # # Max E. Kuznecov ~syhpoon <[email protected]> 2008 # # This file is part of XYZCommander. # XYZCommander is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # XYZCommander is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser Public License for more details. # You should have received a copy of the GNU Lesser Public License # along with XYZCommander. If not, see <http://www.gnu.org/licenses/>. import os from libxyz.core.utils import bstring, ustring from libxyz.vfs import types, util class VFSObject(object): """ Abstract interface for VFS objects """ def __init__(self, xyz, path, full_path, ext_path, driver, parent, enc=None, **kwargs): self.xyz = xyz self.enc = enc or xyzenc # Internal VFS path self.path = bstring(path, self.enc) # Full VFS path self.full_path = bstring(full_path, self.enc) # External VFS path self.ext_path = bstring(ext_path, self.enc) self.parent = parent self.driver = driver self.kwargs = kwargs self.fileobj = None # File name self.name = os.path.basename(self.path) # File type self.ftype = None # Access time self.atime = None # Modified time self.mtime = None # Changed time self.ctime = None # Size in bytes self.size = None # Owner UID self.uid = None # Group self.gid = None # Mode self.mode = None # Inode self.inode = None # Visual file type self.vtype = None # Visual file representation self.visual = None # File info self.info = None # Any type-specific data self.data = None # List of significant attributes self.attributes = () self.__ni_msg = _(u"Feature not implemented") #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_file(self): """ Return True if instance is representing regular file """ return isinstance(self.ftype, types.VFSTypeFile) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_dir(self): """ Return True if instance is representing directory """ return isinstance(self.ftype, types.VFSTypeDir) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_dir_empty(self): """ Return True if instance is representing directory and it is empty """ if not self.is_dir(): return False _, _, objs = self.walk() return len(objs) == 0 #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_link(self): """ Return True if instance is representing soft link """ return isinstance(self.ftype, types.VFSTypeLink) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_char(self): """ Return True if instance is representing soft char device """ return isinstance(self.ftype, types.VFSTypeChar) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_block(self): """ Return True if instance is representing block device """ return isinstance(self.ftype, types.VFSTypeBlock) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_fifo(self): """ Return True if instance is representing FIFO """ return isinstance(self.ftype, types.VFSTypeFifo) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def is_socket(self): """ Return True if instance is representing socket """ return isinstance(self.ftype, types.VFSTypeSocket) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def copy(self, path, existcb=None, errorcb=None, save_attrs=True, follow_links=False, cancel=None): """ Copy file to specified location @param path: Local path to copy 
file to @param existcb: Callback function to be called if there exists an object in target directory with the same name. Callback function receives VFSObject instance as an argument and must return one of: 'override' - to override this very object 'override all' - to override any future collisions 'skip' - to skip the object 'skip all' - to skip all future collisions 'abort' - to abort the process. If no existscb provided 'abort' is used as default @param errorcb: Callback function to be called in case an error occured during copying. Function receives VFSObject instance and error string as arguments and must return one of: 'skip' - to continue the process 'skip all' - to skip all future errors 'abort' - to abort the process. If no errorcb provided 'abort' is used as default @param save_attrs: Whether to save object attributes @param follow_links: Whether to follow symlinks @param cancel: a threading.Event instance, if it is found set - abort """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def move(self, path, existcb=None, errorcb=None, save_attrs=True, follow_links=False, cancel=None): """ Move object Arguments are the same as for copy() """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def mkdir(self, newdir): """ Create new dir inside object (only valid for directory object types) """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def remove(self, recursive=True): """ [Recursively] remove object """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def walk(self): """ Directory tree walker """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def open(self, mode='r'): """ Open self object in provided mode """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def close(self): """ Close self object """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def read(self, bytes=None): """ Read bytes from self object """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def tell(self): """ Tell file position """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def seek(self, offset, whence=None): """ Perform seek() on object """ raise NotImplementedError(self.__ni_msg) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def in_dir(self, d, e): """ Filter only those archive entries which exist in the same directory level """ if e.startswith(d.lstrip(os.sep)) and \ len(util.split_path(e)) == (len(util.split_path(d)) + 1): return True else: return False #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def __repr__(self): return self.__str__() #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def __unicode__(self): return ustring(self.__str__()) #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def __del__(self): if self.fileobj: try: self.close() except Exception: pass
gpl-3.0
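The in_dir() helper above is the one piece of concrete logic in this abstract class; its keep-only-entries-one-level-below filter is easy to check in isolation (a sketch that approximates util.split_path with str.split):

import os

def in_dir(d, e):
    # True only when entry e lives directly inside directory d.
    return (e.startswith(d.lstrip(os.sep)) and
            len(e.split(os.sep)) == len(d.split(os.sep)) + 1)

print(in_dir('archive/docs', 'archive/docs/readme.txt'))      # True
print(in_dir('archive/docs', 'archive/docs/sub/readme.txt'))  # False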
simobasso/ansible
test/units/parsing/vault/test_vault.py
60
5974
# (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import getpass import os import shutil import time import tempfile import six from binascii import unhexlify from binascii import hexlify from nose.plugins.skip import SkipTest from ansible.compat.tests import unittest from ansible.utils.unicode import to_bytes, to_unicode from ansible import errors from ansible.parsing.vault import VaultLib # Counter import fails for 2.0.1, requires >= 2.6.1 from pip try: from Crypto.Util import Counter HAS_COUNTER = True except ImportError: HAS_COUNTER = False # KDF import fails for 2.0.1, requires >= 2.6.1 from pip try: from Crypto.Protocol.KDF import PBKDF2 HAS_PBKDF2 = True except ImportError: HAS_PBKDF2 = False # AES IMPORTS try: from Crypto.Cipher import AES as AES HAS_AES = True except ImportError: HAS_AES = False class TestVaultLib(unittest.TestCase): def test_methods_exist(self): v = VaultLib('ansible') slots = ['is_encrypted', 'encrypt', 'decrypt', '_format_output', '_split_header',] for slot in slots: assert hasattr(v, slot), "VaultLib is missing the %s method" % slot def test_is_encrypted(self): v = VaultLib(None) assert not v.is_encrypted(u"foobar"), "encryption check on plaintext failed" data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible") assert v.is_encrypted(data), "encryption check on headered text failed" def test_format_output(self): v = VaultLib('ansible') v.cipher_name = "TEST" sensitive_data = b"ansible" data = v._format_output(sensitive_data) lines = data.split(b'\n') assert len(lines) > 1, "failed to properly add header" header = to_bytes(lines[0]) assert header.endswith(b';TEST'), "header does end with cipher name" header_parts = header.split(b';') assert len(header_parts) == 3, "header has the wrong number of parts" assert header_parts[0] == b'$ANSIBLE_VAULT', "header does not start with $ANSIBLE_VAULT" assert header_parts[1] == v.b_version, "header version is incorrect" assert header_parts[2] == b'TEST', "header does end with cipher name" def test_split_header(self): v = VaultLib('ansible') data = b"$ANSIBLE_VAULT;9.9;TEST\nansible" rdata = v._split_header(data) lines = rdata.split(b'\n') assert lines[0] == b"ansible" assert v.cipher_name == 'TEST', "cipher name was not set" assert v.b_version == b"9.9" def test_encrypt_decrypt_aes(self): if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') v.cipher_name = u'AES' # AES encryption code has been removed, so this is old output for # AES-encrypted 'foobar' with password 'ansible'. 
enc_data = b'$ANSIBLE_VAULT;1.1;AES\n53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3\nfe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e\n786a5a15efeb787e1958cbdd480d076c\n' dec_data = v.decrypt(enc_data) assert dec_data == b"foobar", "decryption failed" def test_encrypt_decrypt_aes256(self): if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') v.cipher_name = 'AES256' enc_data = v.encrypt(b"foobar") dec_data = v.decrypt(enc_data) assert enc_data != b"foobar", "encryption failed" assert dec_data == b"foobar", "decryption failed" def test_encrypt_encrypted(self): if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') v.cipher_name = 'AES' data = "$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(six.b("ansible")) error_hit = False try: enc_data = v.encrypt(data) except errors.AnsibleError as e: error_hit = True assert error_hit, "No error was thrown when trying to encrypt data with a header" def test_decrypt_decrypted(self): if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') data = "ansible" error_hit = False try: dec_data = v.decrypt(data) except errors.AnsibleError as e: error_hit = True assert error_hit, "No error was thrown when trying to decrypt data without a header" def test_cipher_not_set(self): # not setting the cipher should default to AES256 if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') data = "ansible" error_hit = False try: enc_data = v.encrypt(data) except errors.AnsibleError as e: error_hit = True assert not error_hit, "An error was thrown when trying to encrypt data without the cipher set" assert v.cipher_name == "AES256", "cipher name is not set to AES256: %s" % v.cipher_name
gpl-3.0
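The tests above all revolve around the $ANSIBLE_VAULT;<version>;<cipher> header line; a minimal standalone parser sketch (not VaultLib's own implementation):

def split_vault_header(data):
    # b"$ANSIBLE_VAULT;9.9;TEST\n<body>" -> (version, cipher, body)
    lines = data.split(b'\n')
    tag, version, cipher = lines[0].split(b';')
    assert tag == b'$ANSIBLE_VAULT'
    return version, cipher, b'\n'.join(lines[1:])

print(split_vault_header(b'$ANSIBLE_VAULT;9.9;TEST\nansible'))
# (b'9.9', b'TEST', b'ansible')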
ClaudioNahmad/Servicio-Social
Parametros/CosmoMC/prerrequisitos/plc-2.0/build/pyfits-3.2.2/lib/pyfits/core.py
3
5534
#!/usr/bin/env python # $Id$ """ A module for reading and writing FITS files and manipulating their contents. A module for reading and writing Flexible Image Transport System (FITS) files. This file format was endorsed by the International Astronomical Union in 1999 and mandated by NASA as the standard format for storing high energy astrophysics data. For details of the FITS standard, see the NASA/Science Office of Standards and Technology publication, NOST 100-2.0. For detailed examples of usage, see the `PyFITS User's Manual <http://stsdas.stsci.edu/download/wikidocs/The_PyFITS_Handbook.pdf>`_. """ # The existing unit tests, anyways, only require this in pyfits.hdu.table, # but we should still leave new division here too in order to avoid any nasty # surprises from __future__ import division # confidence high """ Do you mean: "Profits"? - Google Search, when asked for "PyFITS" """ import os import sys import warnings import pyfits.py3compat # Public API compatibility imports import pyfits.card import pyfits.column import pyfits.convenience import pyfits.diff import pyfits.hdu from pyfits.card import * from pyfits.column import * from pyfits.convenience import * from pyfits.diff import * from pyfits.fitsrec import FITS_record, FITS_rec from pyfits.hdu import * from pyfits.hdu.hdulist import fitsopen as open from pyfits.hdu.image import Section from pyfits.hdu.table import new_table from pyfits.header import Header # Additional imports used by the documentation (some of which should be # restructured at some point) from pyfits.verify import VerifyError # Set module-global boolean variables--these variables can also get their # values from environment variables GLOBALS = [ # Variable name # Default ('ENABLE_RECORD_VALUED_KEYWORD_CARDS', True), ('EXTENSION_NAME_CASE_SENSITIVE', False), ('STRIP_HEADER_WHITESPACE', True), ('USE_MEMMAP', True) ] for varname, default in GLOBALS: try: locals()[varname] = bool(int(os.environ.get('PYFITS_' + varname, default))) except ValueError: locals()[varname] = default __all__ = (pyfits.card.__all__ + pyfits.column.__all__ + pyfits.convenience.__all__ + pyfits.diff.__all__ + pyfits.hdu.__all__ + ['FITS_record', 'FITS_rec', 'open', 'Section', 'new_table', 'Header', 'VerifyError', 'TRUE', 'FALSE'] + [g[0] for g in GLOBALS]) # These are of course deprecated, but a handful of external code still uses # them TRUE = True FALSE = False # Warnings routines _formatwarning = warnings.formatwarning def formatwarning(message, category, filename, lineno, line=None): if issubclass(category, UserWarning): return unicode(message) + '\n' else: if sys.version_info[:2] < (2, 6): # Python versions prior to 2.6 don't support the line argument return _formatwarning(message, category, filename, lineno) else: return _formatwarning(message, category, filename, lineno, line) warnings.formatwarning = formatwarning warnings.filterwarnings('always', category=UserWarning, append=True) # This is a workaround for a bug that appears in some versions of Python 2.5 if sys.version_info[:2] < (2, 6): import urllib class ErrorURLopener(urllib.FancyURLopener): """A class to use with `urlretrieve` to allow `IOError` exceptions to be raised when a file specified by a URL cannot be accessed. 
""" def http_error_default(self, url, fp, errcode, errmsg, headers): raise IOError((errcode, errmsg, url)) urllib._urlopener = ErrorURLopener() # Assign the locally subclassed opener # class to the urllibrary urllib._urlopener.tempcache = {} # Initialize tempcache with an empty # dictionary to enable file cacheing __credits__ = """ Copyright (C) 2004 Association of Universities for Research in Astronomy (AURA) Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of AURA and its representatives may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """
gpl-3.0
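The GLOBALS loop in this record is a reusable pattern: boolean module options with environment-variable overrides. The same idea in isolation (option names kept from the record, but stored in a dict instead of module globals):

import os

GLOBALS = [
    # (variable name, default)
    ('USE_MEMMAP', True),
    ('STRIP_HEADER_WHITESPACE', True),
]

settings = {}
for varname, default in GLOBALS:
    try:
        # e.g. PYFITS_USE_MEMMAP=0 in the environment turns the flag off.
        settings[varname] = bool(int(os.environ.get('PYFITS_' + varname, default)))
    except ValueError:
        settings[varname] = default

print(settings)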
317070/kaggle-heart
ira/configurations/gauss_roi10_maxout.py
1
9185
from collections import namedtuple import lasagne as nn from lasagne.layers.dnn import Conv2DDNNLayer, MaxPool2DDNNLayer import data_iterators import numpy as np import theano.tensor as T from functools import partial import utils_heart import nn_heart from pathfinder import PKL_TRAIN_DATA_PATH, TRAIN_LABELS_PATH, PKL_VALIDATE_DATA_PATH import utils import data caching = None restart_from_save = None rng = np.random.RandomState(42) patch_size = (64, 64) train_transformation_params = { 'patch_size': patch_size, 'mm_patch_size': (128, 128), 'mask_roi': True, 'rotation_range': (-180, 180), 'translation_range_x': (-5, 10), 'translation_range_y': (-10, 10), 'shear_range': (0, 0), 'roi_scale_range': (0.95, 1.3), 'zoom_range': (1 / 1.5, 1.5), 'do_flip': True, 'sequence_shift': False } valid_transformation_params = { 'patch_size': patch_size, 'mm_patch_size': (128, 128), 'mask_roi': True, } test_transformation_params = { 'patch_size': patch_size, 'mm_patch_size': (128, 128), 'mask_roi': True, 'rotation_range': (-180, 180), 'translation_range_x': (-5, 10), 'translation_range_y': (-10, 10), 'shear_range': (0, 0), 'roi_scale_range': (0.95, 1.3), 'zoom_range': (1., 1.), 'do_flip': True, 'sequence_shift': False } data_prep_fun = data.transform_norm_rescale_after batch_size = 32 nbatches_chunk = 16 chunk_size = batch_size * nbatches_chunk train_valid_ids = utils.get_train_valid_split(PKL_TRAIN_DATA_PATH) train_data_iterator = data_iterators.SliceNormRescaleDataGenerator(data_path=PKL_TRAIN_DATA_PATH, batch_size=chunk_size, transform_params=train_transformation_params, patient_ids=train_valid_ids['train'], labels_path=TRAIN_LABELS_PATH, slice2roi_path='pkl_train_slice2roi_10.pkl', full_batch=True, random=True, infinite=True, data_prep_fun=data_prep_fun) valid_data_iterator = data_iterators.SliceNormRescaleDataGenerator(data_path=PKL_TRAIN_DATA_PATH, batch_size=chunk_size, transform_params=valid_transformation_params, patient_ids=train_valid_ids['valid'], labels_path=TRAIN_LABELS_PATH, slice2roi_path='pkl_train_slice2roi_10.pkl', full_batch=False, random=False, infinite=False, data_prep_fun=data_prep_fun) test_data_iterator = data_iterators.SliceNormRescaleDataGenerator(data_path=PKL_VALIDATE_DATA_PATH, batch_size=chunk_size, transform_params=test_transformation_params, slice2roi_path='pkl_validate_slice2roi_10.pkl', full_batch=False, random=False, infinite=False, data_prep_fun=data_prep_fun) nchunks_per_epoch = train_data_iterator.nsamples / chunk_size max_nchunks = nchunks_per_epoch * 150 learning_rate_schedule = { 0: 0.0002, int(max_nchunks * 0.1): 0.0001, int(max_nchunks * 0.3): 0.000075, int(max_nchunks * 0.6): 0.00005, int(max_nchunks * 0.9): 0.00001 } validate_every = 2 * nchunks_per_epoch save_every = 2 * nchunks_per_epoch conv3 = partial(Conv2DDNNLayer, stride=(1, 1), pad="same", filter_size=(3, 3), nonlinearity=nn.nonlinearities.very_leaky_rectify, b=nn.init.Constant(0.1), W=nn.init.Orthogonal("relu")) max_pool = partial(MaxPool2DDNNLayer, pool_size=(2, 2), stride=(2, 2)) def build_model(l_in=None): l_in = nn.layers.InputLayer((None, 30) + patch_size) if not l_in else l_in l = conv3(l_in, num_filters=128) l = conv3(l, num_filters=128) l = max_pool(l) l = conv3(l, num_filters=128) l = conv3(l, num_filters=128) l = max_pool(l) l = conv3(l, num_filters=256) l = conv3(l, num_filters=256) l = conv3(l, num_filters=256) l = max_pool(l) l = conv3(l, num_filters=512) l = conv3(l, num_filters=512) l = conv3(l, num_filters=512) l = max_pool(l) l = conv3(l, num_filters=512) l = conv3(l, num_filters=512) l = 
conv3(l, num_filters=512)
    l = max_pool(l)

    l_d01 = nn.layers.DenseLayer(l, num_units=1024, W=nn.init.Orthogonal("relu"),
                                 b=nn.init.Constant(0.1), nonlinearity=None)
    l_d01 = nn.layers.FeaturePoolLayer(l_d01, pool_size=2)
    l_d02 = nn.layers.DenseLayer(nn.layers.dropout(l_d01), num_units=1024, W=nn.init.Orthogonal("relu"),
                                 b=nn.init.Constant(0.1), nonlinearity=None)
    l_d02 = nn.layers.FeaturePoolLayer(l_d02, pool_size=2)
    mu0 = nn.layers.DenseLayer(nn.layers.dropout(l_d02), num_units=1, W=nn.init.Orthogonal(),
                               b=nn.init.Constant(50), nonlinearity=nn_heart.lb_softplus())
    sigma0 = nn.layers.DenseLayer(nn.layers.dropout(l_d02), num_units=1, W=nn.init.Orthogonal(),
                                  b=nn.init.Constant(10), nonlinearity=nn_heart.lb_softplus())
    l_cdf0 = nn_heart.NormalCDFLayer(mu0, sigma0, sigma_logscale=False, mu_logscale=False)

    # ---------------------------------------------------------------
    l_d11 = nn.layers.DenseLayer(l, num_units=1024, W=nn.init.Orthogonal("relu"),
                                 b=nn.init.Constant(0.1), nonlinearity=None)
    l_d11 = nn.layers.FeaturePoolLayer(l_d11, pool_size=2)
    l_d12 = nn.layers.DenseLayer(nn.layers.dropout(l_d11), num_units=1024, W=nn.init.Orthogonal("relu"),
                                 b=nn.init.Constant(0.1), nonlinearity=None)
    l_d12 = nn.layers.FeaturePoolLayer(l_d12, pool_size=2)
    mu1 = nn.layers.DenseLayer(nn.layers.dropout(l_d12), num_units=1, W=nn.init.Orthogonal(),
                               b=nn.init.Constant(100), nonlinearity=nn_heart.lb_softplus())
    sigma1 = nn.layers.DenseLayer(nn.layers.dropout(l_d12), num_units=1, W=nn.init.Orthogonal(),
                                  b=nn.init.Constant(10), nonlinearity=nn_heart.lb_softplus())
    l_cdf1 = nn_heart.NormalCDFLayer(mu1, sigma1, sigma_logscale=False, mu_logscale=False)

    l_outs = [l_cdf0, l_cdf1]
    l_top = nn.layers.MergeLayer(l_outs)

    l_target_mu0 = nn.layers.InputLayer((None, 1))
    l_target_mu1 = nn.layers.InputLayer((None, 1))
    l_targets = [l_target_mu0, l_target_mu1]

    dense_layers = [l_d01, l_d02, l_d11, l_d12, mu0, sigma0, mu1, sigma1]
    mu_layers = [mu0, mu1]
    sigma_layers = [sigma0, sigma1]

    return namedtuple('Model', ['l_ins', 'l_outs', 'l_targets', 'l_top', 'dense_layers',
                                'mu_layers', 'sigma_layers'])(
        [l_in], l_outs, l_targets, l_top, dense_layers, mu_layers, sigma_layers)


def build_objective(model, deterministic=False):
    p0 = nn.layers.get_output(model.l_outs[0], deterministic=deterministic)
    t0 = nn.layers.get_output(model.l_targets[0])
    t0_heaviside = nn_heart.heaviside(t0)
    crps0 = T.mean((p0 - t0_heaviside) ** 2)

    p1 = nn.layers.get_output(model.l_outs[1], deterministic=deterministic)
    t1 = nn.layers.get_output(model.l_targets[1])
    t1_heaviside = nn_heart.heaviside(t1)
    crps1 = T.mean((p1 - t1_heaviside) ** 2)

    return 0.5 * (crps0 + crps1)


def build_updates(train_loss, model, learning_rate):
    updates = nn.updates.adam(train_loss, nn.layers.get_all_params(model.l_top), learning_rate)
    return updates


def get_mean_validation_loss(batch_predictions, batch_targets):
    return [0, 0]


def get_mean_crps_loss(batch_predictions, batch_targets, batch_ids):
    nbatches = len(batch_predictions)
    npredictions = len(batch_predictions[0])

    crpss = []
    for i in xrange(npredictions):
        p, t = [], []
        for j in xrange(nbatches):
            p.append(batch_predictions[j][i])
            t.append(batch_targets[j][i])
        p, t = np.vstack(p), np.vstack(t)
        target_cdf = utils_heart.heaviside_function(t)
        crpss.append(np.mean((p - target_cdf) ** 2))
    return crpss


def get_avg_patient_predictions(batch_predictions, batch_patient_ids, mean):
    return utils_heart.get_patient_average_cdf_predictions(batch_predictions, batch_patient_ids, mean)
mit
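build_objective() above scores predicted CDFs against Heaviside step targets (the CRPS metric); the numpy form used at validation time is short. A sketch with toy arrays (the 600-bin volume axis and shapes are assumptions, not values from the config):

import numpy as np

def heaviside(volumes, npoints=600):
    # Step-function CDF: 1 for every volume bin at or above the true volume.
    return (np.arange(npoints)[np.newaxis, :] >= volumes).astype('float32')

predicted_cdf = np.clip(np.cumsum(np.random.rand(2, 600), axis=1) / 300.0, 0, 1)
target_cdf = heaviside(np.array([[120.0], [310.0]]))

crps = np.mean((predicted_cdf - target_cdf) ** 2)
print(crps)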
mingwpy/numpy
numpy/lib/__init__.py
114
1146
from __future__ import division, absolute_import, print_function import math from .info import __doc__ from numpy.version import version as __version__ from .type_check import * from .index_tricks import * from .function_base import * from .nanfunctions import * from .shape_base import * from .stride_tricks import * from .twodim_base import * from .ufunclike import * from . import scimath as emath from .polynomial import * #import convertcode from .utils import * from .arraysetops import * from .npyio import * from .financial import * from .arrayterator import * from .arraypad import * from ._version import * __all__ = ['emath', 'math'] __all__ += type_check.__all__ __all__ += index_tricks.__all__ __all__ += function_base.__all__ __all__ += shape_base.__all__ __all__ += stride_tricks.__all__ __all__ += twodim_base.__all__ __all__ += ufunclike.__all__ __all__ += arraypad.__all__ __all__ += polynomial.__all__ __all__ += utils.__all__ __all__ += arraysetops.__all__ __all__ += npyio.__all__ __all__ += financial.__all__ __all__ += nanfunctions.__all__ from numpy.testing import Tester test = Tester().test bench = Tester().bench
bsd-3-clause
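This __init__ illustrates the star-import-and-concatenate-__all__ pattern for assembling a package namespace; a toy, self-contained sketch of the same mechanism (stand-in modules, not numpy's real submodules):

import types

# Stand-ins for submodules; a real package would use
# "from .type_check import *" and then read type_check.__all__.
type_check = types.ModuleType('type_check')
type_check.__all__ = ['iscomplex', 'isreal']
npyio = types.ModuleType('npyio')
npyio.__all__ = ['loadtxt', 'savetxt']

__all__ = ['emath', 'math']    # names exported directly
__all__ += type_check.__all__  # plus everything each submodule exports
__all__ += npyio.__all__
print(__all__)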
chouseknecht/ansible
test/units/modules/network/nxos/test_nxos_nxapi.py
68
3057
# (c) 2016 Red Hat Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from units.compat.mock import patch from ansible.modules.network.nxos import nxos_nxapi from .nxos_module import TestNxosModule, load_fixture, set_module_args class TestNxosNxapiModule(TestNxosModule): module = nxos_nxapi def setUp(self): super(TestNxosNxapiModule, self).setUp() self.mock_run_commands = patch('ansible.modules.network.nxos.nxos_nxapi.run_commands') self.run_commands = self.mock_run_commands.start() self.mock_load_config = patch('ansible.modules.network.nxos.nxos_nxapi.load_config') self.load_config = self.mock_load_config.start() self.mock_get_capabilities = patch('ansible.modules.network.nxos.nxos_nxapi.get_capabilities') self.get_capabilities = self.mock_get_capabilities.start() self.get_capabilities.return_value = {'device_info': {'network_os_platform': 'N7K-C7018', 'network_os_version': '8.3(1)'}, 'network_api': 'cliconf'} def tearDown(self): super(TestNxosNxapiModule, self).tearDown() self.mock_run_commands.stop() self.mock_load_config.stop() self.mock_get_capabilities.stop() def load_fixtures(self, commands=None, device=''): def load_from_file(*args, **kwargs): module, commands = args module_name = self.module.__name__.rsplit('.', 1)[1] output = list() for command in commands: filename = str(command).split(' | ')[0].replace(' ', '_') output.append(load_fixture(module_name, filename, device)) return output self.run_commands.side_effect = load_from_file self.load_config.return_value = None def test_nxos_nxapi_no_change(self): set_module_args(dict(http=True, https=False, http_port=80, https_port=443, sandbox=False)) self.execute_module_devices(changed=False, commands=[]) def test_nxos_nxapi_disable(self): set_module_args(dict(state='absent')) self.execute_module_devices(changed=True, commands=['no feature nxapi']) def test_nxos_nxapi_no_http(self): set_module_args(dict(https=True, http=False, https_port=8443)) self.execute_module_devices(changed=True, commands=['no nxapi http', 'nxapi https port 8443'])
gpl-3.0
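The setUp()/tearDown() pair above shows the canonical start/stop discipline for mock patching; the same skeleton outside Ansible's test tree looks like this (the patch target is chosen only for illustration):

import os
import unittest
from unittest.mock import patch

class ExamplePatchedTest(unittest.TestCase):
    def setUp(self):
        # Start the patch here so every test sees the mock ...
        self.mock_getcwd = patch('os.getcwd')
        self.getcwd = self.mock_getcwd.start()
        self.getcwd.return_value = '/tmp'

    def tearDown(self):
        # ... and always stop it, mirroring the setUp/tearDown pair above.
        self.mock_getcwd.stop()

    def test_uses_mock(self):
        self.assertEqual(os.getcwd(), '/tmp')

if __name__ == '__main__':
    unittest.main()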
ChopChopKodi/pelisalacarta
python/main-classic/channels/piratestreaming.py
3
12423
# -*- coding: utf-8 -*- #------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Canal para piratestreaming # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import re import sys import urllib2 import urlparse from core import config from core import logger from core import scrapertools from core.item import Item DEBUG = config.get_setting("debug") def mainlist(item): logger.info("[piratestreaming.py] mainlist") itemlist = [] itemlist.append( Item(channel=item.channel, title="Novità" , action="peliculas", url="http://www.piratestreaming.co/film-aggiornamenti.php")) itemlist.append( Item(channel=item.channel, title="Per genere" , action="categorias", url="http://www.piratestreaming.co/")) itemlist.append( Item(channel=item.channel, title="Cerca", action="search")) return itemlist def search(item,texto): logger.info("[piratestreaming.py] search "+texto) itemlist = [] texto = texto.replace(" ","%20") item.url = "http://www.piratestreaming.co/cerca.php?all="+texto item.extra = "" try: return peliculas(item) # Se captura la excepción, para no interrumpir al buscador global si un canal falla except: import sys for line in sys.exc_info(): logger.error( "%s" % line ) return [] def categorias(item): ''' <a href="#">Film</a> <ul> <li><a href="http://www.piratestreaming.co/film-aggiornamenti.php">AGGIORNAMENTI</a></li> <li><a href="http://www.web-streaming-mania.net/" target=_blank><strong><font color="red">&#171;FILM PORNO&#187;</font></a></strong></li> <li><a href="http://www.piratestreaming.co/categoria/film/animazione.html">ANIMAZIONE</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/avventura.html">AVVENTURA</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/azione.html">AZIONE</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/biografico.html">BIOGRAFICO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/comico.html">COMICO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/commedia.html">COMMEDIA</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/documentario.html">DOCUMENTARIO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/drammatico.html">DRAMMATICO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/erotico.html">EROTICO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/fantascienza.html">FANTASCIENZA</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/fantasy.html">FANTASY</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/giallo.html">GIALLO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/grottesco.html">GROTTESCO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/guerra.html">GUERRA</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/horror.html">HORROR</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/musical.html">MUSICAL</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/poliziesco.html">POLIZIESCO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/romantico.html">ROMANTICO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/romanzo.html">ROMANZO</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/sentimentale.html">SENTIMENTALE</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/storico.html">STORICO</a></li> <li><a 
href="http://www.piratestreaming.co/categoria/film/thriller.html">THRILLER</a></li> <li><a href="http://www.piratestreaming.co/categoria/film/western.html">WESTERN</a></li> </ul> ''' itemlist = [] data = scrapertools.cache_page(item.url) data = scrapertools.get_match(data,'<a href="#">Film</a>[^<]+<ul>(.*?)</ul>' ) patron = '<li><a href="([^"]+)">([^<]+)</a></li>' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) for scrapedurl,scrapedtitle in matches: scrapedplot = "" scrapedthumbnail = "" if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]") itemlist.append( Item(channel=item.channel, action="peliculas", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) ) return itemlist def peliculas(item): logger.info("[piratestreaming.py] peliculas") itemlist = [] # Descarga la p�gina data = scrapertools.cachePage(item.url) # Extrae las entradas (carpetas) ''' <div class="featuredItem"> <a href=http://www.imagerip.net/images/ilregnodig.jpg class="featuredImg img" rel="featured"><img src=http://www.imagerip.net/images/ilregnodig.jpg alt="featured item" style="width: 80.8px; height: 109.6px;" /></a> <div class="featuredText"> <b><a href=http://www.piratestreaming.co/film/il-regno-di-gia-la-leggenda-dei-guardiani-streaming-ita.html>Il Regno di Ga' Hoole La leggenda dei guardiani Ita </a></b> <br /><g:plusone size="small" href=http://www.piratestreaming.co/film/il-regno-di-gia-la-leggenda-dei-guardiani-streaming-ita.html></g:plusone> <div id="fb-root"></div><fb:like href="http://www.piratestreaming.co/film/il-regno-di-gia-la-leggenda-dei-guardiani-streaming-ita.html" send="false" layout="button_count" show_faces="false" action="like" colorscheme="dark" font=""></fb:like> </b> </div> </div> <div class="featuredItem"> <a href="http://www.piratestreaming.co/film/paris-manhattan.html" class="featuredImg img rounded" rel="featured" style="border-top-left-radius: 4px; border-top-right-radius: 4px; border-bottom-right-radius: 4px; border-bottom-left-radius: 4px; "> <img src="http://www.imagerip.net/images/Of6FN.jpg" alt="Locandina Film" style="width: 80.8px; height: 109.6px;"></a> <div class="featuredText"> <b> <a href="http://www.piratestreaming.co/film/paris-manhattan.html">Paris Manhattan </a><br><div style="height: 15px; width: 70px; display: inline-block; text-indent: 0px; margin: 0px; padding: 0px; background-color: transparent; border-style: none; float: none; line-height: normal; font-size: 1px; vertical-align: baseline; background-position: initial initial; background-repeat: initial initial; " id="___plusone_0"><iframe allowtransparency="true" frameborder="0" hspace="0" marginheight="0" marginwidth="0" scrolling="no" style="position: static; top: 0px; width: 70px; margin: 0px; border-style: none; left: 0px; visibility: visible; height: 15px; " tabindex="0" vspace="0" width="100%" id="I0_1352901511754" name="I0_1352901511754" src="https://plusone.google.com/_/+1/fastbutton?bsv&amp;size=small&amp;hl=en-US&amp;origin=http%3A%2F%2Fwww.piratestreaming.com&amp;url=http%3A%2F%2Fwww.piratestreaming.com%2Ffilm%2Fparis-manhattan.html&amp;jsh=m%3B%2F_%2Fapps-static%2F_%2Fjs%2Fgapi%2F__features__%2Frt%3Dj%2Fver%3Dmq7ez1ykxXY.it.%2Fsv%3D1%2Fam%3D!9YrXPIrxx2-ITyEIjA%2Fd%3D1%2Frs%3DAItRSTOgKZowsoksby8_wLnRD0d_umAXMQ#_methods=onPlusOne%2C_ready%2C_close%2C_open%2C_resizeMe%2C_renderstart%2Concircled&amp;id=I0_1352901511754&amp;parent=http%3A%2F%2Fwww.piratestreaming.com" 
title="+1"></iframe></div> <div id="fb-root"></div><fb:like href="http://www.piratestreaming.co/film/paris-manhattan.html" send="false" layout="button_count" show_faces="false" action="like" colorscheme="dark" font="" fb-xfbml-state="rendered" class="fb_edge_widget_with_comment fb_iframe_widget"><span style="height: 20px; width: 98px; "><iframe id="f2834df314" name="f2e5c9573" scrolling="no" style="border: none; overflow: hidden; height: 20px; width: 98px; " title="Like this content on Facebook." class="fb_ltr" src="http://www.facebook.com/plugins/like.php?api_key=&amp;locale=it_IT&amp;sdk=joey&amp;channel_url=http%3A%2F%2Fstatic.ak.facebook.com%2Fconnect%2Fxd_arbiter.php%3Fversion%3D17%23cb%3Df2495f47c%26origin%3Dhttp%253A%252F%252Fwww.piratestreaming.com%252Ff153526b2c%26domain%3Dwww.piratestreaming.com%26relation%3Dparent.parent&amp;href=http%3A%2F%2Fwww.piratestreaming.com%2Ffilm%2Fparis-manhattan.html&amp;node_type=link&amp;width=90&amp;layout=button_count&amp;colorscheme=dark&amp;action=like&amp;show_faces=false&amp;send=false&amp;extended_social_context=false"></iframe></span></fb:like> <a href="http://www.piratestreaming.co/video1" target="_blank" rel="nofollow"><img src="http://www.imagerip.net/images/W57R.png"></a> </b> </div> </div> ''' patron = '<div class="featuredItem">\s*' patron += '<a[^>]*>' patron += '<img src="([^"]+)"[^<]+</a>[^<]+' patron += '<div class="featuredText">.*?' patron += '<a href=([^>]+)>([^<]+)</a>' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) for scrapedthumbnail,scrapedurl,scrapedtitle in matches: scrapedplot = "" logger.info("scrapedurl="+scrapedurl) if scrapedurl.startswith("\""): scrapedurl=scrapedurl[1:-1] logger.info("scrapedurl="+scrapedurl) try: res = urllib2.urlopen(scrapedurl) daa = res.read() da = daa.split('justify;">'); da = da[1].split('</p>') scrapedplot = scrapertools.htmlclean(da[0]).strip() except: scrapedplot= "Trama non disponibile" if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]") itemlist.append( Item(channel=item.channel, action="findvideos", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) ) ''' <div class="featuredItem"> <a href=http://www.piratestreaming.co/film/supercondriaco-ridere-fa-bene-alla-salute.html class="featuredImg img" rel="featured"><img src=http://imagerip.net/images/2014/06/19/Supercondriaco.jpg alt="featured item" style="width: 80.8px; height: 109.6px;" /></a> <div class="featuredText"> <b><a href=http://www.piratestreaming.co/film/supercondriaco-ridere-fa-bene-alla-salute.html>Supercondriaco - Ridere fa bene alla salute </b><br /><g:plusone size="medium" href=http://www.piratestreaming.co/film/supercondriaco-ridere-fa-bene-alla-salute.html rel="nofollow"></g:plusone> <div id="fb-root"></div><fb:like href="http://www.piratestreaming.co/film/supercondriaco-ridere-fa-bene-alla-salute.html" send="false" layout="button_count" show_faces="false" action="like" colorscheme="dark" font=""></fb:like> </div> </div> ''' patron = '<div class="featuredItem"[^<]+' patron += '<a href=(.*?) [^<]+' patron += '<img src=(.*?) 
[^<]+</a[^<]+' patron += '<div class="featuredText"[^<]+' patron += '<b><a[^>]+>([^<]+)' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) for scrapedurl,scrapedthumbnail,scrapedtitle in matches: logger.info("scrapedurl="+scrapedurl) if scrapedurl.startswith("\""): scrapedurl=scrapedurl[1:-1] logger.info("scrapedurl="+scrapedurl) try: res = urllib2.urlopen(scrapedurl) daa = res.read() da = daa.split('justify;">'); da = da[1].split('</p>') scrapedplot = scrapertools.htmlclean(da[0]).strip() except: scrapedplot= "Trama non disponibile" if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], plot=["+scrapedplot+"], thumbnail=["+scrapedthumbnail+"]") itemlist.append( Item(channel=item.channel, action="findvideos", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) ) # Extrae el paginador patronvideos = '<td align="center">[^<]+</td>[^<]+<td align="center">\s*<a href="([^"]+)">[^<]+</a>' matches = re.compile(patronvideos,re.DOTALL).findall(data) scrapertools.printMatches(matches) if len(matches)>0: scrapedurl = urlparse.urljoin(item.url,matches[0]) itemlist.append( Item(channel=item.channel, action="peliculas", title="Next Page >>" , url=scrapedurl , folder=True) ) return itemlist
gpl-3.0
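Each parser in this channel reduces to one re.compile(...).findall() pass over the page body; a self-contained sketch of that scraping step against toy HTML (not the live site's markup):

import re

html = '''
<div class="featuredItem">
  <a href="http://example.org/film/one.html" class="featuredImg img">
  <img src="http://example.org/one.jpg" alt="Locandina Film"></a>
  <div class="featuredText"><b>
  <a href="http://example.org/film/one.html">Film One</a></b></div>
</div>
'''

patron = r'<a href="([^"]+)" class="featuredImg.*?<img src="([^"]+)".*?<b>\s*<a[^>]+>([^<]+)</a>'
for url, thumb, title in re.compile(patron, re.DOTALL).findall(html):
    print(title.strip(), url, thumb)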
SafeW3rd/Ciphers
primeSieve.py
1
1139
# Prime Number Sieve # http://inventwithpython.com/hacking (BSD Licensed) import math def isPrime(num): # Returns True if num is a prime number, otherwise False. # Note: Generally, isPrime() is slower than primeSieve(). # all numbers less than 2 are not prime if num < 2: return False # see if num is divisible by any number up to the square root of num for i in range(2, int(math.sqrt(num)) + 1): if num % i == 0: return False return True def primeSieve(sieveSize): # Returns a list of prime numbers calculated using # the Sieve of Eratosthenes algorithm. sieve = [True] * sieveSize sieve[0] = False # zero and one are not prime numbers sieve[1] = False # create the sieve for i in range(2, int(math.sqrt(sieveSize)) + 1): pointer = i * 2 while pointer < sieveSize: sieve[pointer] = False pointer += i # compile the list of primes primes = [] for i in range(sieveSize): if sieve[i] == True: primes.append(i) return primes
mit
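A quick consistency check of the two functions above — every number the sieve returns should also pass isPrime() (assumes the file is importable as primeSieve):

from primeSieve import isPrime, primeSieve  # the module shown above

primes = primeSieve(100)
print(primes[:10])        # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
assert all(isPrime(p) for p in primes)
assert len(primes) == 25  # there are 25 primes below 100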
wdv4758h/ZipPy
lib-python/3/tkinter/test/test_tkinter/test_loadtk.py
162
1503
import os import sys import unittest import test.support as test_support from tkinter import Tcl, TclError test_support.requires('gui') class TkLoadTest(unittest.TestCase): @unittest.skipIf('DISPLAY' not in os.environ, 'No $DISPLAY set.') def testLoadTk(self): tcl = Tcl() self.assertRaises(TclError,tcl.winfo_geometry) tcl.loadtk() self.assertEqual('1x1+0+0', tcl.winfo_geometry()) tcl.destroy() def testLoadTkFailure(self): old_display = None if sys.platform.startswith(('win', 'darwin', 'cygwin')): # no failure possible on windows? # XXX Maybe on tk older than 8.4.13 it would be possible, # see tkinter.h. return with test_support.EnvironmentVarGuard() as env: if 'DISPLAY' in os.environ: del env['DISPLAY'] # on some platforms, deleting environment variables # doesn't actually carry through to the process level # because they don't support unsetenv # If that's the case, abort. with os.popen('echo $DISPLAY') as pipe: display = pipe.read().strip() if display: return tcl = Tcl() self.assertRaises(TclError, tcl.winfo_geometry) self.assertRaises(TclError, tcl.loadtk) tests_gui = (TkLoadTest, ) if __name__ == "__main__": test_support.run_unittest(*tests_gui)
bsd-3-clause
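testLoadTkFailure() above depends on temporarily removing DISPLAY; outside test.support.EnvironmentVarGuard, the same guard is a small context manager (a sketch):

import os
from contextlib import contextmanager

@contextmanager
def env_var_removed(name):
    # Temporarily unset an environment variable, restoring it afterwards.
    saved = os.environ.pop(name, None)
    try:
        yield
    finally:
        if saved is not None:
            os.environ[name] = saved

with env_var_removed('DISPLAY'):
    print('DISPLAY' in os.environ)  # False inside the block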
pku9104038/edx-platform
common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py
17
5131
# This class gives a common interface for logging into the grading controller
import json
import logging
import requests
from requests.exceptions import RequestException, ConnectionError, HTTPError

from .combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError
from lxml import etree

log = logging.getLogger(__name__)


class GradingServiceError(Exception):
    """
    Exception for grading service.  Shown when Open Response Assessment servers cannot be reached.
    """
    pass


class GradingService(object):
    """
    Interface to staff grading backend.
    """
    def __init__(self, config):
        self.username = config['username']
        self.password = config['password']
        self.session = requests.Session()
        self.system = config['system']

    def _login(self):
        """
        Log into the staff grading service.

        Raises requests.exceptions.HTTPError if something goes wrong.

        Returns the decoded json dict of the response.
        """
        response = self.session.post(self.login_url,
                                     {'username': self.username,
                                      'password': self.password, })
        response.raise_for_status()
        return response.json()

    def post(self, url, data, allow_redirects=False):
        """
        Make a post request to the grading controller
        """
        try:
            op = lambda: self.session.post(url, data=data, allow_redirects=allow_redirects)
            r = self._try_with_login(op)
        except (RequestException, ConnectionError, HTTPError) as err:
            # reraise as promised GradingServiceError, but preserve stacktrace.
            # This is a dev_facing_error
            error_string = "Problem posting data to the grading controller. URL: {0}, data: {1}".format(url, data)
            log.error(error_string)
            raise GradingServiceError(error_string)
        return r.text

    def get(self, url, params, allow_redirects=False):
        """
        Make a get request to the grading controller
        """
        op = lambda: self.session.get(url, allow_redirects=allow_redirects, params=params)
        try:
            r = self._try_with_login(op)
        except (RequestException, ConnectionError, HTTPError) as err:
            # reraise as promised GradingServiceError, but preserve stacktrace.
            # This is a dev_facing_error
            error_string = "Problem getting data from the grading controller. URL: {0}, params: {1}".format(url, params)
            log.error(error_string)
            raise GradingServiceError(error_string)
        return r.text

    def _try_with_login(self, operation):
        """
        Call operation(), which should return a requests response object.  If
        the request fails with a 'login_required' error, call _login() and try
        the operation again.

        Returns the result of operation().  Does not catch exceptions.
        """
        response = operation()
        resp_json = response.json()
        if (resp_json
                and resp_json.get('success') is False
                and resp_json.get('error') == 'login_required'):
            # apparently we aren't logged in.  Try to fix that.
            r = self._login()
            if r and not r.get('success'):
                log.warning("Couldn't log into staff_grading backend. Response: %s", r)
            # try again
            response = operation()
            response.raise_for_status()
        return response

    def _render_rubric(self, response, view_only=False):
        """
        Given an HTTP Response with the key 'rubric', render out the html
        required to display the rubric and put it back into the response

        returns the updated response as a dictionary that can be serialized later
        """
        try:
            response_json = json.loads(response)
        except:
            response_json = response
        try:
            if 'rubric' in response_json:
                rubric = response_json['rubric']
                rubric_renderer = CombinedOpenEndedRubric(self.system, view_only)
                rubric_dict = rubric_renderer.render_rubric(rubric)
                success = rubric_dict['success']
                rubric_html = rubric_dict['html']
                response_json['rubric'] = rubric_html
            return response_json
        # if we can't parse the rubric into HTML,
        except (etree.XMLSyntaxError, RubricParsingError):
            # This is a dev_facing_error
            log.exception("Cannot parse rubric string. Raw string: {0}".format(rubric))
            return {'success': False, 'error': 'Error displaying submission'}
        except ValueError:
            # This is a dev_facing_error
            log.exception("Error parsing response: {0}".format(response))
            return {'success': False, 'error': "Error displaying submission"}
agpl-3.0
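The `_try_with_login` method above encodes a small reusable pattern: run a request, and if the backend answers with a `login_required` error, authenticate once and retry. A minimal standalone sketch of the same pattern, independent of the edX classes; the `operation` and `login` callables are assumptions for illustration:

def try_with_login(operation, login):
    # operation() must return a requests.Response; login() re-authenticates.
    response = operation()
    body = response.json()
    if body and body.get('success') is False and body.get('error') == 'login_required':
        login()                  # re-authenticate, then retry exactly once
        response = operation()
    response.raise_for_status()  # surface hard HTTP failures to the caller
    return response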
abaditsegay/arangodb
3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_shlex.py
179
5300
# -*- coding: iso-8859-1 -*- import unittest import shlex from test import test_support try: from cStringIO import StringIO except ImportError: from StringIO import StringIO # The original test data set was from shellwords, by Hartmut Goebel. data = r"""x|x| foo bar|foo|bar| foo bar|foo|bar| foo bar |foo|bar| foo bar bla fasel|foo|bar|bla|fasel| x y z xxxx|x|y|z|xxxx| \x bar|\|x|bar| \ x bar|\|x|bar| \ bar|\|bar| foo \x bar|foo|\|x|bar| foo \ x bar|foo|\|x|bar| foo \ bar|foo|\|bar| foo "bar" bla|foo|"bar"|bla| "foo" "bar" "bla"|"foo"|"bar"|"bla"| "foo" bar "bla"|"foo"|bar|"bla"| "foo" bar bla|"foo"|bar|bla| foo 'bar' bla|foo|'bar'|bla| 'foo' 'bar' 'bla'|'foo'|'bar'|'bla'| 'foo' bar 'bla'|'foo'|bar|'bla'| 'foo' bar bla|'foo'|bar|bla| blurb foo"bar"bar"fasel" baz|blurb|foo"bar"bar"fasel"|baz| blurb foo'bar'bar'fasel' baz|blurb|foo'bar'bar'fasel'|baz| ""|""| ''|''| foo "" bar|foo|""|bar| foo '' bar|foo|''|bar| foo "" "" "" bar|foo|""|""|""|bar| foo '' '' '' bar|foo|''|''|''|bar| \""|\|""| "\"|"\"| "foo\ bar"|"foo\ bar"| "foo\\ bar"|"foo\\ bar"| "foo\\ bar\"|"foo\\ bar\"| "foo\\" bar\""|"foo\\"|bar|\|""| "foo\\ bar\" dfadf"|"foo\\ bar\"|dfadf"| "foo\\\ bar\" dfadf"|"foo\\\ bar\"|dfadf"| "foo\\\x bar\" dfadf"|"foo\\\x bar\"|dfadf"| "foo\x bar\" dfadf"|"foo\x bar\"|dfadf"| \''|\|''| 'foo\ bar'|'foo\ bar'| 'foo\\ bar'|'foo\\ bar'| "foo\\\x bar\" df'a\ 'df'|"foo\\\x bar\"|df'a|\|'df'| \"foo"|\|"foo"| \"foo"\x|\|"foo"|\|x| "foo\x"|"foo\x"| "foo\ "|"foo\ "| foo\ xx|foo|\|xx| foo\ x\x|foo|\|x|\|x| foo\ x\x\""|foo|\|x|\|x|\|""| "foo\ x\x"|"foo\ x\x"| "foo\ x\x\\"|"foo\ x\x\\"| "foo\ x\x\\""foobar"|"foo\ x\x\\"|"foobar"| "foo\ x\x\\"\''"foobar"|"foo\ x\x\\"|\|''|"foobar"| "foo\ x\x\\"\'"fo'obar"|"foo\ x\x\\"|\|'"fo'|obar"| "foo\ x\x\\"\'"fo'obar" 'don'\''t'|"foo\ x\x\\"|\|'"fo'|obar"|'don'|\|''|t'| 'foo\ bar'|'foo\ bar'| 'foo\\ bar'|'foo\\ bar'| foo\ bar|foo|\|bar| foo#bar\nbaz|foobaz| :-) ;-)|:|-|)|;|-|)| áéíóú|á|é|í|ó|ú| """ posix_data = r"""x|x| foo bar|foo|bar| foo bar|foo|bar| foo bar |foo|bar| foo bar bla fasel|foo|bar|bla|fasel| x y z xxxx|x|y|z|xxxx| \x bar|x|bar| \ x bar| x|bar| \ bar| bar| foo \x bar|foo|x|bar| foo \ x bar|foo| x|bar| foo \ bar|foo| bar| foo "bar" bla|foo|bar|bla| "foo" "bar" "bla"|foo|bar|bla| "foo" bar "bla"|foo|bar|bla| "foo" bar bla|foo|bar|bla| foo 'bar' bla|foo|bar|bla| 'foo' 'bar' 'bla'|foo|bar|bla| 'foo' bar 'bla'|foo|bar|bla| 'foo' bar bla|foo|bar|bla| blurb foo"bar"bar"fasel" baz|blurb|foobarbarfasel|baz| blurb foo'bar'bar'fasel' baz|blurb|foobarbarfasel|baz| ""|| ''|| foo "" bar|foo||bar| foo '' bar|foo||bar| foo "" "" "" bar|foo||||bar| foo '' '' '' bar|foo||||bar| \"|"| "\""|"| "foo\ bar"|foo\ bar| "foo\\ bar"|foo\ bar| "foo\\ bar\""|foo\ bar"| "foo\\" bar\"|foo\|bar"| "foo\\ bar\" dfadf"|foo\ bar" dfadf| "foo\\\ bar\" dfadf"|foo\\ bar" dfadf| "foo\\\x bar\" dfadf"|foo\\x bar" dfadf| "foo\x bar\" dfadf"|foo\x bar" dfadf| \'|'| 'foo\ bar'|foo\ bar| 'foo\\ bar'|foo\\ bar| "foo\\\x bar\" df'a\ 'df"|foo\\x bar" df'a\ 'df| \"foo|"foo| \"foo\x|"foox| "foo\x"|foo\x| "foo\ "|foo\ | foo\ xx|foo xx| foo\ x\x|foo xx| foo\ x\x\"|foo xx"| "foo\ x\x"|foo\ x\x| "foo\ x\x\\"|foo\ x\x\| "foo\ x\x\\""foobar"|foo\ x\x\foobar| "foo\ x\x\\"\'"foobar"|foo\ x\x\'foobar| "foo\ x\x\\"\'"fo'obar"|foo\ x\x\'fo'obar| "foo\ x\x\\"\'"fo'obar" 'don'\''t'|foo\ x\x\'fo'obar|don't| "foo\ x\x\\"\'"fo'obar" 'don'\''t' \\|foo\ x\x\'fo'obar|don't|\| 'foo\ bar'|foo\ bar| 'foo\\ bar'|foo\\ bar| foo\ bar|foo bar| foo#bar\nbaz|foo|baz| :-) ;-)|:-)|;-)| áéíóú|áéíóú| """ class ShlexTest(unittest.TestCase): 
def setUp(self): self.data = [x.split("|")[:-1] for x in data.splitlines()] self.posix_data = [x.split("|")[:-1] for x in posix_data.splitlines()] for item in self.data: item[0] = item[0].replace(r"\n", "\n") for item in self.posix_data: item[0] = item[0].replace(r"\n", "\n") def splitTest(self, data, comments): for i in range(len(data)): l = shlex.split(data[i][0], comments=comments) self.assertEqual(l, data[i][1:], "%s: %s != %s" % (data[i][0], l, data[i][1:])) def oldSplit(self, s): ret = [] lex = shlex.shlex(StringIO(s)) tok = lex.get_token() while tok: ret.append(tok) tok = lex.get_token() return ret def testSplitPosix(self): """Test data splitting with posix parser""" self.splitTest(self.posix_data, comments=True) def testCompat(self): """Test compatibility interface""" for i in range(len(self.data)): l = self.oldSplit(self.data[i][0]) self.assertEqual(l, self.data[i][1:], "%s: %s != %s" % (self.data[i][0], l, self.data[i][1:])) # Allow this test to be used with old shlex.py if not getattr(shlex, "split", None): for methname in dir(ShlexTest): if methname.startswith("test") and methname != "testCompat": delattr(ShlexTest, methname) def test_main(): test_support.run_unittest(ShlexTest) if __name__ == "__main__": test_main()
apache-2.0
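The two tables above pin down the difference between POSIX and non-POSIX lexing. A quick illustration of a few rows from each table, using only the standard-library shlex module:

import shlex

# POSIX mode (shlex.split's default): quotes and backslashes are consumed,
# matching the posix_data table.
assert shlex.split('foo "bar" bla') == ['foo', 'bar', 'bla']
assert shlex.split(r'foo\ bar') == ['foo bar']

# Non-POSIX mode (plain shlex.shlex, as in oldSplit above): quote
# characters survive in the tokens, matching the data table.
lex = shlex.shlex('foo "bar" bla')
assert list(lex) == ['foo', '"bar"', 'bla']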
bufferapp/buffer-django-nonrel
django/contrib/sitemaps/tests/basic.py
155
7620
import os from datetime import date from django.conf import settings from django.contrib.auth.models import User from django.contrib.sitemaps import Sitemap from django.contrib.sites.models import Site from django.core.exceptions import ImproperlyConfigured from django.test import TestCase from django.utils.unittest import skipUnless from django.utils.formats import localize from django.utils.translation import activate, deactivate class SitemapTests(TestCase): urls = 'django.contrib.sitemaps.tests.urls' def setUp(self): if Site._meta.installed: self.base_url = 'http://example.com' else: self.base_url = 'http://testserver' self.old_USE_L10N = settings.USE_L10N self.old_Site_meta_installed = Site._meta.installed self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS settings.TEMPLATE_DIRS = ( os.path.join(os.path.dirname(__file__), 'templates'), ) # Create a user that will double as sitemap content User.objects.create_user('testuser', '[email protected]', 's3krit') def tearDown(self): settings.USE_L10N = self.old_USE_L10N Site._meta.installed = self.old_Site_meta_installed settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS def test_simple_sitemap_index(self): "A simple sitemap index can be rendered" # Retrieve the sitemap. response = self.client.get('/simple/index.xml') # Check for all the important bits: self.assertEqual(response.content, """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap> </sitemapindex> """ % self.base_url) def test_simple_sitemap_custom_index(self): "A simple sitemap index can be rendered with a custom template" # Retrieve the sitemap. response = self.client.get('/simple/custom-index.xml') # Check for all the important bits: self.assertEqual(response.content, """<?xml version="1.0" encoding="UTF-8"?> <!-- This is a customised template --> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap> </sitemapindex> """ % self.base_url) def test_simple_sitemap(self): "A simple sitemap can be rendered" # Retrieve the sitemap. response = self.client.get('/simple/sitemap.xml') # Check for all the important bits: self.assertEqual(response.content, """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url, date.today().strftime('%Y-%m-%d'))) def test_simple_custom_sitemap(self): "A simple sitemap can be rendered with a custom template" # Retrieve the sitemap. response = self.client.get('/simple/custom-sitemap.xml') # Check for all the important bits: self.assertEqual(response.content, """<?xml version="1.0" encoding="UTF-8"?> <!-- This is a customised template --> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url, date.today().strftime('%Y-%m-%d'))) @skipUnless(settings.USE_I18N, "Internationalization is not enabled") def test_localized_priority(self): "The priority value should not be localized (Refs #14164)" # Localization should be active settings.USE_L10N = True activate('fr') self.assertEqual(u'0,3', localize(0.3)) # Retrieve the sitemap. 
Check that priorities # haven't been rendered in localized format response = self.client.get('/simple/sitemap.xml') self.assertContains(response, '<priority>0.5</priority>') self.assertContains(response, '<lastmod>%s</lastmod>' % date.today().strftime('%Y-%m-%d')) deactivate() def test_generic_sitemap(self): "A minimal generic sitemap can be rendered" # Retrieve the sitemap. response = self.client.get('/generic/sitemap.xml') expected = '' for username in User.objects.values_list("username", flat=True): expected += "<url><loc>%s/users/%s/</loc></url>" % (self.base_url, username) # Check for all the important bits: self.assertEqual(response.content, """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> %s </urlset> """ % expected) @skipUnless("django.contrib.flatpages" in settings.INSTALLED_APPS, "django.contrib.flatpages app not installed.") def test_flatpage_sitemap(self): "Basic FlatPage sitemap test" # Import FlatPage inside the test so that when django.contrib.flatpages # is not installed we don't get problems trying to delete Site # objects (FlatPage has an M2M to Site, Site.delete() tries to # delete related objects, but the M2M table doesn't exist. from django.contrib.flatpages.models import FlatPage public = FlatPage.objects.create( url=u'/public/', title=u'Public Page', enable_comments=True, registration_required=False, ) public.sites.add(settings.SITE_ID) private = FlatPage.objects.create( url=u'/private/', title=u'Private Page', enable_comments=True, registration_required=True ) private.sites.add(settings.SITE_ID) response = self.client.get('/flatpages/sitemap.xml') # Public flatpage should be in the sitemap self.assertContains(response, '<loc>%s%s</loc>' % (self.base_url, public.url)) # Private flatpage should not be in the sitemap self.assertNotContains(response, '<loc>%s%s</loc>' % (self.base_url, private.url)) def test_requestsite_sitemap(self): # Make sure hitting the flatpages sitemap without the sites framework # installed doesn't raise an exception Site._meta.installed = False # Retrieve the sitemap. response = self.client.get('/simple/sitemap.xml') # Check for all the important bits: self.assertEqual(response.content, """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <url><loc>http://testserver/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % date.today().strftime('%Y-%m-%d')) @skipUnless("django.contrib.sites" in settings.INSTALLED_APPS, "django.contrib.sites app not installed.") def test_sitemap_get_urls_no_site_1(self): """ Check we get ImproperlyConfigured if we don't pass a site object to Sitemap.get_urls and no Site objects exist """ Site.objects.all().delete() self.assertRaises(ImproperlyConfigured, Sitemap().get_urls) def test_sitemap_get_urls_no_site_2(self): """ Check we get ImproperlyConfigured when we don't pass a site object to Sitemap.get_urls if Site objects exists, but the sites framework is not actually installed. """ Site._meta.installed = False self.assertRaises(ImproperlyConfigured, Sitemap().get_urls)
bsd-3-clause
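The `/generic/` URL fixture exercised by test_generic_sitemap is not shown in this file, but the expected output (one `/users/<username>/` entry per User) implies a sitemap class along these lines. A hedged sketch, not the actual test fixture:

from django.contrib.auth.models import User
from django.contrib.sitemaps import Sitemap

class UserSitemap(Sitemap):
    # The "simple" tests above also exercise these two class attributes.
    changefreq = 'never'
    priority = 0.5

    def items(self):
        return User.objects.all()

    def location(self, obj):
        return '/users/%s/' % obj.username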
bslatkin/8-bits
appengine-mapreduce/python/test/testlib/testutil.py
2
4505
#!/usr/bin/env python # # Copyright 2010 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Test utilities for mapreduce framework. """ # Disable "Invalid method name" # pylint: disable-msg=C6409 # os_compat must be first to ensure timezones are UTC. # Disable "unused import" and "invalid import order" # pylint: disable-msg=W0611 from google.appengine.tools import os_compat # pylint: enable-msg=W0611 from testlib import mox import os import shutil import sys import tempfile import unittest import urllib from google.appengine.api import apiproxy_stub_map from google.appengine.api.files import file_service_stub from google.appengine.api.blobstore import blobstore_stub from google.appengine.api import datastore_file_stub from google.appengine.api import queueinfo from google.appengine.api.blobstore import file_blob_storage from google.appengine.api.memcache import memcache_stub from google.appengine.api.taskqueue import taskqueue_stub class MatchesDatastoreConfig(mox.Comparator): """Mox comparator for MatchesDatastoreConfig objects.""" def __init__(self, **kwargs): self.kwargs = kwargs def equals(self, config): """Check to see if config matches arguments.""" if self.kwargs.get("deadline", None) != config.deadline: return False if self.kwargs.get("force_writes", None) != config.force_writes: return False return True def __repr__(self): return "MatchesDatastoreConfig(%s)" % self.kwargs class MatchesUserRPC(mox.Comparator): """Mox comparator for UserRPC objects.""" def __init__(self, **kwargs): self.kwargs = kwargs def equals(self, rpc): """Check to see if rpc matches arguments.""" if self.kwargs.get("deadline", None) != rpc.deadline: return False return True def __repr__(self): return "MatchesUserRPC(%s)" % self.kwargs class HandlerTestBase(unittest.TestCase): """Base class for all webapp.RequestHandler tests.""" MAPREDUCE_URL = "/_ah/mapreduce/kickoffjob_callback" def setUp(self): unittest.TestCase.setUp(self) self.mox = mox.Mox() self.appid = "testapp" self.version_id = "1.23456789" os.environ["APPLICATION_ID"] = self.appid os.environ["CURRENT_VERSION_ID"] = self.version_id os.environ["HTTP_HOST"] = "localhost" self.memcache = memcache_stub.MemcacheServiceStub() self.taskqueue = taskqueue_stub.TaskQueueServiceStub() self.taskqueue.queue_yaml_parser = ( lambda x: queueinfo.LoadSingleQueue( "queue:\n" "- name: default\n" " rate: 10/s\n" "- name: crazy-queue\n" " rate: 2000/d\n" " bucket_size: 10\n")) self.datastore = datastore_file_stub.DatastoreFileStub( self.appid, "/dev/null", "/dev/null") self.blob_storage_directory = tempfile.mkdtemp() blob_storage = file_blob_storage.FileBlobStorage( self.blob_storage_directory, self.appid) self.blobstore_stub = blobstore_stub.BlobstoreServiceStub(blob_storage) self.file_service = self.createFileServiceStub(blob_storage) apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap() apiproxy_stub_map.apiproxy.RegisterStub("taskqueue", self.taskqueue) apiproxy_stub_map.apiproxy.RegisterStub("memcache", self.memcache) 
apiproxy_stub_map.apiproxy.RegisterStub("datastore_v3", self.datastore) apiproxy_stub_map.apiproxy.RegisterStub("blobstore", self.blobstore_stub) apiproxy_stub_map.apiproxy.RegisterStub("file", self.file_service) def createFileServiceStub(self, blob_storage): return file_service_stub.FileServiceStub(blob_storage) def tearDown(self): try: self.mox.VerifyAll() finally: self.mox.UnsetStubs() shutil.rmtree(self.blob_storage_directory) unittest.TestCase.tearDown(self) def assertTaskStarted(self, queue="default"): tasks = self.taskqueue.GetTasks(queue) self.assertEquals(1, len(tasks)) self.assertEquals(tasks[0]["url"], self.MAPREDUCE_URL)
apache-2.0
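MatchesDatastoreConfig and MatchesUserRPC above are mox comparators: when one is passed as an expected argument, mox calls its equals() method against the actual argument instead of comparing with ==. A minimal comparator of the same shape; the RPC-like argument it matches is an assumption for illustration:

import mox

class MatchesDeadline(mox.Comparator):
    """Equal to any argument whose .deadline matches the expected value."""
    def __init__(self, deadline):
        self.deadline = deadline

    def equals(self, rpc):
        # Called by mox with the actual argument at verification time.
        return getattr(rpc, 'deadline', None) == self.deadline

    def __repr__(self):
        return 'MatchesDeadline(%s)' % self.deadline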
pedro2d10/SickRage-FR
lib/unidecode/x05f.py
252
4660
data = ( 'Kai ', # 0x00 'Bian ', # 0x01 'Yi ', # 0x02 'Qi ', # 0x03 'Nong ', # 0x04 'Fen ', # 0x05 'Ju ', # 0x06 'Yan ', # 0x07 'Yi ', # 0x08 'Zang ', # 0x09 'Bi ', # 0x0a 'Yi ', # 0x0b 'Yi ', # 0x0c 'Er ', # 0x0d 'San ', # 0x0e 'Shi ', # 0x0f 'Er ', # 0x10 'Shi ', # 0x11 'Shi ', # 0x12 'Gong ', # 0x13 'Diao ', # 0x14 'Yin ', # 0x15 'Hu ', # 0x16 'Fu ', # 0x17 'Hong ', # 0x18 'Wu ', # 0x19 'Tui ', # 0x1a 'Chi ', # 0x1b 'Jiang ', # 0x1c 'Ba ', # 0x1d 'Shen ', # 0x1e 'Di ', # 0x1f 'Zhang ', # 0x20 'Jue ', # 0x21 'Tao ', # 0x22 'Fu ', # 0x23 'Di ', # 0x24 'Mi ', # 0x25 'Xian ', # 0x26 'Hu ', # 0x27 'Chao ', # 0x28 'Nu ', # 0x29 'Jing ', # 0x2a 'Zhen ', # 0x2b 'Yi ', # 0x2c 'Mi ', # 0x2d 'Quan ', # 0x2e 'Wan ', # 0x2f 'Shao ', # 0x30 'Ruo ', # 0x31 'Xuan ', # 0x32 'Jing ', # 0x33 'Dun ', # 0x34 'Zhang ', # 0x35 'Jiang ', # 0x36 'Qiang ', # 0x37 'Peng ', # 0x38 'Dan ', # 0x39 'Qiang ', # 0x3a 'Bi ', # 0x3b 'Bi ', # 0x3c 'She ', # 0x3d 'Dan ', # 0x3e 'Jian ', # 0x3f 'Gou ', # 0x40 'Sei ', # 0x41 'Fa ', # 0x42 'Bi ', # 0x43 'Kou ', # 0x44 'Nagi ', # 0x45 'Bie ', # 0x46 'Xiao ', # 0x47 'Dan ', # 0x48 'Kuo ', # 0x49 'Qiang ', # 0x4a 'Hong ', # 0x4b 'Mi ', # 0x4c 'Kuo ', # 0x4d 'Wan ', # 0x4e 'Jue ', # 0x4f 'Ji ', # 0x50 'Ji ', # 0x51 'Gui ', # 0x52 'Dang ', # 0x53 'Lu ', # 0x54 'Lu ', # 0x55 'Tuan ', # 0x56 'Hui ', # 0x57 'Zhi ', # 0x58 'Hui ', # 0x59 'Hui ', # 0x5a 'Yi ', # 0x5b 'Yi ', # 0x5c 'Yi ', # 0x5d 'Yi ', # 0x5e 'Huo ', # 0x5f 'Huo ', # 0x60 'Shan ', # 0x61 'Xing ', # 0x62 'Wen ', # 0x63 'Tong ', # 0x64 'Yan ', # 0x65 'Yan ', # 0x66 'Yu ', # 0x67 'Chi ', # 0x68 'Cai ', # 0x69 'Biao ', # 0x6a 'Diao ', # 0x6b 'Bin ', # 0x6c 'Peng ', # 0x6d 'Yong ', # 0x6e 'Piao ', # 0x6f 'Zhang ', # 0x70 'Ying ', # 0x71 'Chi ', # 0x72 'Chi ', # 0x73 'Zhuo ', # 0x74 'Tuo ', # 0x75 'Ji ', # 0x76 'Pang ', # 0x77 'Zhong ', # 0x78 'Yi ', # 0x79 'Wang ', # 0x7a 'Che ', # 0x7b 'Bi ', # 0x7c 'Chi ', # 0x7d 'Ling ', # 0x7e 'Fu ', # 0x7f 'Wang ', # 0x80 'Zheng ', # 0x81 'Cu ', # 0x82 'Wang ', # 0x83 'Jing ', # 0x84 'Dai ', # 0x85 'Xi ', # 0x86 'Xun ', # 0x87 'Hen ', # 0x88 'Yang ', # 0x89 'Huai ', # 0x8a 'Lu ', # 0x8b 'Hou ', # 0x8c 'Wa ', # 0x8d 'Cheng ', # 0x8e 'Zhi ', # 0x8f 'Xu ', # 0x90 'Jing ', # 0x91 'Tu ', # 0x92 'Cong ', # 0x93 '[?] ', # 0x94 'Lai ', # 0x95 'Cong ', # 0x96 'De ', # 0x97 'Pai ', # 0x98 'Xi ', # 0x99 '[?] 
', # 0x9a 'Qi ', # 0x9b 'Chang ', # 0x9c 'Zhi ', # 0x9d 'Cong ', # 0x9e 'Zhou ', # 0x9f 'Lai ', # 0xa0 'Yu ', # 0xa1 'Xie ', # 0xa2 'Jie ', # 0xa3 'Jian ', # 0xa4 'Chi ', # 0xa5 'Jia ', # 0xa6 'Bian ', # 0xa7 'Huang ', # 0xa8 'Fu ', # 0xa9 'Xun ', # 0xaa 'Wei ', # 0xab 'Pang ', # 0xac 'Yao ', # 0xad 'Wei ', # 0xae 'Xi ', # 0xaf 'Zheng ', # 0xb0 'Piao ', # 0xb1 'Chi ', # 0xb2 'De ', # 0xb3 'Zheng ', # 0xb4 'Zheng ', # 0xb5 'Bie ', # 0xb6 'De ', # 0xb7 'Chong ', # 0xb8 'Che ', # 0xb9 'Jiao ', # 0xba 'Wei ', # 0xbb 'Jiao ', # 0xbc 'Hui ', # 0xbd 'Mei ', # 0xbe 'Long ', # 0xbf 'Xiang ', # 0xc0 'Bao ', # 0xc1 'Qu ', # 0xc2 'Xin ', # 0xc3 'Shu ', # 0xc4 'Bi ', # 0xc5 'Yi ', # 0xc6 'Le ', # 0xc7 'Ren ', # 0xc8 'Dao ', # 0xc9 'Ding ', # 0xca 'Gai ', # 0xcb 'Ji ', # 0xcc 'Ren ', # 0xcd 'Ren ', # 0xce 'Chan ', # 0xcf 'Tan ', # 0xd0 'Te ', # 0xd1 'Te ', # 0xd2 'Gan ', # 0xd3 'Qi ', # 0xd4 'Shi ', # 0xd5 'Cun ', # 0xd6 'Zhi ', # 0xd7 'Wang ', # 0xd8 'Mang ', # 0xd9 'Xi ', # 0xda 'Fan ', # 0xdb 'Ying ', # 0xdc 'Tian ', # 0xdd 'Min ', # 0xde 'Min ', # 0xdf 'Zhong ', # 0xe0 'Chong ', # 0xe1 'Wu ', # 0xe2 'Ji ', # 0xe3 'Wu ', # 0xe4 'Xi ', # 0xe5 'Ye ', # 0xe6 'You ', # 0xe7 'Wan ', # 0xe8 'Cong ', # 0xe9 'Zhong ', # 0xea 'Kuai ', # 0xeb 'Yu ', # 0xec 'Bian ', # 0xed 'Zhi ', # 0xee 'Qi ', # 0xef 'Cui ', # 0xf0 'Chen ', # 0xf1 'Tai ', # 0xf2 'Tun ', # 0xf3 'Qian ', # 0xf4 'Nian ', # 0xf5 'Hun ', # 0xf6 'Xiong ', # 0xf7 'Niu ', # 0xf8 'Wang ', # 0xf9 'Xian ', # 0xfa 'Xin ', # 0xfb 'Kang ', # 0xfc 'Hu ', # 0xfd 'Kai ', # 0xfe 'Fen ', # 0xff )
gpl-3.0
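These unidecode tables are keyed by code point: the file name x05f encodes the high byte (this table covers U+5F00..U+5FFF) and the tuple index is the low byte. A lookup sketch, assuming the package is importable as `unidecode`:

from unidecode.x05f import data

cp = ord(u'\u5f13')      # U+5F13, the character 'bow'
assert cp >> 8 == 0x5F   # the high byte selects this x05f table
print(data[cp & 0xFF])   # the low byte indexes it -> 'Gong ' (entry 0x13 above)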
sursum/buckanjaren
buckanjaren/lib/python3.5/site-packages/psycopg2/tests/test_bug_gc.py
16
1723
#!/usr/bin/env python # bug_gc.py - test for refcounting/GC bug # # Copyright (C) 2010-2011 Federico Di Gregorio <[email protected]> # # psycopg2 is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # In addition, as a special exception, the copyright holders give # permission to link this program with the OpenSSL library (or with # modified versions of OpenSSL that use the same license as OpenSSL), # and distribute linked combinations including the two. # # You must obey the GNU Lesser General Public License in all respects for # all of the code used other than OpenSSL. # # psycopg2 is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # License for more details. import psycopg2 import psycopg2.extensions import unittest import gc from .testutils import ConnectingTestCase, skip_if_no_uuid class StolenReferenceTestCase(ConnectingTestCase): @skip_if_no_uuid def test_stolen_reference_bug(self): def fish(val, cur): gc.collect() return 42 UUID = psycopg2.extensions.new_type((2950,), "UUID", fish) psycopg2.extensions.register_type(UUID, self.conn) curs = self.conn.cursor() curs.execute("select 'b5219e01-19ab-4994-b71e-149225dc51e4'::uuid") curs.fetchone() def test_suite(): return unittest.TestLoader().loadTestsFromName(__name__) if __name__ == "__main__": unittest.main()
mit
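The test above leans on psycopg2's typecasting hooks; the `fish` caster only exists to trigger garbage collection mid-fetch. In ordinary use the same two calls install a Python-side parser for a PostgreSQL type. A sketch, where `conn` is assumed to be an open connection and 2950 is the uuid type OID used in the test:

import psycopg2.extensions

def cast_uuid(value, cursor):
    # value is the raw string from the server, or None for SQL NULL
    return value  # parse into uuid.UUID(value) or similar here

UUID = psycopg2.extensions.new_type((2950,), "UUID", cast_uuid)
psycopg2.extensions.register_type(UUID, conn)  # omit conn to register globally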
ual/urbansim
urbansim/utils/tests/test_misc.py
5
3159
import os import shutil import numpy as np import pandas as pd import pytest from .. import misc class _FakeTable(object): def __init__(self, name, columns): self.name = name self.columns = columns @pytest.fixture def fta(): return _FakeTable('a', ['aa', 'ab', 'ac']) @pytest.fixture def ftb(): return _FakeTable('b', ['bx', 'by', 'bz']) @pytest.fixture def clean_fake_data_home(request): def fin(): if os.path.isdir('fake_data_home'): shutil.rmtree('fake_data_home') request.addfinalizer(fin) def test_column_map_raises(fta, ftb): with pytest.raises(RuntimeError): misc.column_map([fta, ftb], ['aa', 'by', 'bz', 'cw']) def test_column_map_none(fta, ftb): assert misc.column_map([fta, ftb], None) == {'a': None, 'b': None} def test_column_map(fta, ftb): assert misc.column_map([fta, ftb], ['aa', 'by', 'bz']) == \ {'a': ['aa'], 'b': ['by', 'bz']} assert misc.column_map([fta, ftb], ['by', 'bz']) == \ {'a': [], 'b': ['by', 'bz']} def test_dirs(clean_fake_data_home): misc._mkifnotexists("fake_data_home") os.environ["DATA_HOME"] = "fake_data_home" misc.get_run_number() misc.get_run_number() misc.data_dir() misc.configs_dir() misc.models_dir() misc.charts_dir() misc.maps_dir() misc.simulations_dir() misc.reports_dir() misc.runs_dir() misc.config("test") @pytest.fixture def range_df(): df = pd.DataFrame({'to_zone_id': [2, 3, 4], 'from_zone_id': [1, 1, 1], 'distance': [.1, .2, .9]}) df = df.set_index(['from_zone_id', 'to_zone_id']) return df @pytest.fixture def range_series(): return pd.Series([10, 150, 75, 275], index=[1, 2, 3, 4]) def test_compute_range(range_df, range_series): assert misc.compute_range(range_df, range_series, "distance", .5).loc[1] == 225 def test_reindex(): s = pd.Series([.5, 1.0, 1.5], index=[2, 1, 3]) s2 = pd.Series([1, 2, 3], index=['a', 'b', 'c']) assert list(misc.reindex(s, s2).values) == [1.0, .5, 1.5] def test_naics(): assert misc.naicsname(54) == "Professional" def test_signif(): assert misc.signif(4.0) == '***' assert misc.signif(3.0) == '**' assert misc.signif(2.0) == '*' assert misc.signif(1.5) == '.' assert misc.signif(1.0) == '' @pytest.fixture def simple_dev_inputs(): return pd.DataFrame( {'residential': [40, 40, 40], 'office': [15, 18, 15], 'retail': [12, 10, 10], 'industrial': [12, 12, 12], 'land_cost': [1000000, 2000000, 3000000], 'parcel_size': [10000, 20000, 30000], 'max_far': [2.0, 3.0, 4.0], 'names': ['a', 'b', 'c'], 'max_height': [40, 60, 80]}, index=['a', 'b', 'c']) def test_misc_dffunctions(simple_dev_inputs): misc.df64bitto32bit(simple_dev_inputs) misc.pandasdfsummarytojson(simple_dev_inputs[['land_cost', 'parcel_size']]) misc.numpymat2df(np.array([[1, 2], [3, 4]])) def test_column_list(fta, ftb): assert misc.column_list([fta, ftb], ['aa', 'by', 'bz', 'c']) == \ ['aa', 'by', 'bz']
bsd-3-clause
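test_reindex pins down the contract of misc.reindex: look up the *values* of the second series in the first series' index, and relabel the result with the second series' *index*. A plausible pandas implementation with that behavior; a sketch, not necessarily urbansim's actual code:

import pandas as pd

def reindex(series1, series2):
    return pd.Series(series1.loc[series2.values].values, index=series2.index)

s = pd.Series([.5, 1.0, 1.5], index=[2, 1, 3])
s2 = pd.Series([1, 2, 3], index=['a', 'b', 'c'])
assert list(reindex(s, s2).values) == [1.0, .5, 1.5]  # as in the test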
tienjunhsu/trading-with-python
lib/widgets.py
78
3012
# -*- coding: utf-8 -*- """ A collection of widgets for gui building Copyright: Jev Kuznetsov License: BSD """ from __future__ import division import sys from PyQt4.QtCore import * from PyQt4.QtGui import * import numpy as np from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar from matplotlib.figure import Figure import matplotlib.pyplot as plt class MatplotlibWidget(QWidget): def __init__(self,parent=None,grid=True): QWidget.__init__(self,parent) self.grid = grid self.fig = Figure() self.canvas =FigureCanvas(self.fig) self.canvas.setParent(self) self.canvas.mpl_connect('button_press_event', self.onPick) # bind pick event #self.axes = self.fig.add_subplot(111) margins = [0.05,0.1,0.9,0.8] self.axes = self.fig.add_axes(margins) self.toolbar = NavigationToolbar(self.canvas,self) #self.initFigure() layout = QVBoxLayout() layout.addWidget(self.toolbar) layout.addWidget(self.canvas) self.setLayout(layout) def onPick(self,event): print 'Pick event' print 'you pressed', event.button, event.xdata, event.ydata def update(self): self.canvas.draw() def plot(self,*args,**kwargs): self.axes.plot(*args,**kwargs) self.axes.grid(self.grid) self.update() def clear(self): self.axes.clear() def initFigure(self): self.axes.grid(True) x = np.linspace(-1,1) y = x**2 self.axes.plot(x,y,'o-') class PlotWindow(QMainWindow): ''' a stand-alone window with embedded matplotlib widget ''' def __init__(self,parent=None): super(PlotWindow,self).__init__(parent) self.setAttribute(Qt.WA_DeleteOnClose) self.mplWidget = MatplotlibWidget() self.setCentralWidget(self.mplWidget) def plot(self,dataFrame): ''' plot dataframe ''' dataFrame.plot(ax=self.mplWidget.axes) def getAxes(self): return self.mplWidget.axes def getFigure(self): return self.mplWidget.fig def update(self): self.mplWidget.update() class MainForm(QMainWindow): def __init__(self, parent=None): QMainWindow.__init__(self, parent) self.setWindowTitle('Demo: PyQt with matplotlib') self.plot = MatplotlibWidget() self.setCentralWidget(self.plot) self.plot.clear() self.plot.plot(np.random.rand(10),'x-') #--------------------- if __name__=='__main__': app = QApplication(sys.argv) form = MainForm() form.show() app.exec_()
bsd-3-clause
hopeall/odoo
addons/project_timesheet/__openerp__.py
260
2151
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Bill Time on Tasks', 'version': '1.0', 'category': 'Project Management', 'description': """ Synchronization of project task work entries with timesheet entries. ==================================================================== This module lets you transfer the entries under tasks defined for Project Management to the Timesheet line entries for particular date and particular user with the effect of creating, editing and deleting either ways. """, 'author': 'OpenERP SA', 'website': 'https://www.odoo.com/page/project-management', 'depends': ['resource', 'project', 'hr_timesheet_sheet', 'hr_timesheet_invoice', 'account_analytic_analysis', 'procurement'], 'data': [ 'security/ir.model.access.csv', 'security/project_timesheet_security.xml', 'report/task_report_view.xml', 'project_timesheet_view.xml', ], 'demo': ['project_timesheet_demo.xml'], 'test': [ 'test/worktask_entry_to_timesheetline_entry.yml', 'test/work_timesheet.yml', ], 'installable': True, 'auto_install': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
home-assistant/home-assistant
homeassistant/components/sighthound/image_processing.py
4
5695
"""Person detection using Sighthound cloud service.""" import io import logging from pathlib import Path from PIL import Image, ImageDraw, UnidentifiedImageError import simplehound.core as hound import voluptuous as vol from homeassistant.components.image_processing import ( CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE, PLATFORM_SCHEMA, ImageProcessingEntity, ) from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY from homeassistant.core import split_entity_id import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util from homeassistant.util.pil import draw_box _LOGGER = logging.getLogger(__name__) EVENT_PERSON_DETECTED = "sighthound.person_detected" ATTR_BOUNDING_BOX = "bounding_box" ATTR_PEOPLE = "people" CONF_ACCOUNT_TYPE = "account_type" CONF_SAVE_FILE_FOLDER = "save_file_folder" CONF_SAVE_TIMESTAMPTED_FILE = "save_timestamped_file" DATETIME_FORMAT = "%Y-%m-%d_%H:%M:%S" DEV = "dev" PROD = "prod" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_ACCOUNT_TYPE, default=DEV): vol.In([DEV, PROD]), vol.Optional(CONF_SAVE_FILE_FOLDER): cv.isdir, vol.Optional(CONF_SAVE_TIMESTAMPTED_FILE, default=False): cv.boolean, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the platform.""" # Validate credentials by processing image. api_key = config[CONF_API_KEY] account_type = config[CONF_ACCOUNT_TYPE] api = hound.cloud(api_key, account_type) try: api.detect(b"Test") except hound.SimplehoundException as exc: _LOGGER.error("Sighthound error %s setup aborted", exc) return save_file_folder = config.get(CONF_SAVE_FILE_FOLDER) if save_file_folder: save_file_folder = Path(save_file_folder) entities = [] for camera in config[CONF_SOURCE]: sighthound = SighthoundEntity( api, camera[CONF_ENTITY_ID], camera.get(CONF_NAME), save_file_folder, config[CONF_SAVE_TIMESTAMPTED_FILE], ) entities.append(sighthound) add_entities(entities) class SighthoundEntity(ImageProcessingEntity): """Create a sighthound entity.""" def __init__( self, api, camera_entity, name, save_file_folder, save_timestamped_file ): """Init.""" self._api = api self._camera = camera_entity if name: self._name = name else: camera_name = split_entity_id(camera_entity)[1] self._name = f"sighthound_{camera_name}" self._state = None self._last_detection = None self._image_width = None self._image_height = None self._save_file_folder = save_file_folder self._save_timestamped_file = save_timestamped_file def process_image(self, image): """Process an image.""" detections = self._api.detect(image) people = hound.get_people(detections) self._state = len(people) if self._state > 0: self._last_detection = dt_util.now().strftime(DATETIME_FORMAT) metadata = hound.get_metadata(detections) self._image_width = metadata["image_width"] self._image_height = metadata["image_height"] for person in people: self.fire_person_detected_event(person) if self._save_file_folder and self._state > 0: self.save_image(image, people, self._save_file_folder) def fire_person_detected_event(self, person): """Send event with detected total_persons.""" self.hass.bus.fire( EVENT_PERSON_DETECTED, { ATTR_ENTITY_ID: self.entity_id, ATTR_BOUNDING_BOX: hound.bbox_to_tf_style( person["boundingBox"], self._image_width, self._image_height ), }, ) def save_image(self, image, people, directory): """Save a timestamped image with bounding boxes around targets.""" try: img = Image.open(io.BytesIO(bytearray(image))).convert("RGB") except UnidentifiedImageError: 
_LOGGER.warning("Sighthound unable to process image, bad data") return draw = ImageDraw.Draw(img) for person in people: box = hound.bbox_to_tf_style( person["boundingBox"], self._image_width, self._image_height ) draw_box(draw, box, self._image_width, self._image_height) latest_save_path = directory / f"{self._name}_latest.jpg" img.save(latest_save_path) if self._save_timestamped_file: timestamp_save_path = directory / f"{self._name}_{self._last_detection}.jpg" img.save(timestamp_save_path) _LOGGER.info("Sighthound saved file %s", timestamp_save_path) @property def camera_entity(self): """Return camera entity id from process pictures.""" return self._camera @property def name(self): """Return the name of the sensor.""" return self._name @property def should_poll(self): """Return the polling state.""" return False @property def state(self): """Return the state of the entity.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement.""" return ATTR_PEOPLE @property def extra_state_attributes(self): """Return the attributes.""" if not self._last_detection: return {} return {"last_person": self._last_detection}
apache-2.0
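save_image above relies on a draw_box helper and on bbox_to_tf_style, whose name suggests TensorFlow's (ymin, xmin, ymax, xmax) relative-coordinate convention — an assumption here, since simplehound's internals are not shown. A self-contained Pillow sketch of drawing such a box:

from PIL import Image, ImageDraw

def draw_tf_box(draw, box, img_width, img_height):
    # box is assumed to be (ymin, xmin, ymax, xmax) with values in [0, 1]
    ymin, xmin, ymax, xmax = box
    draw.rectangle(
        [(xmin * img_width, ymin * img_height),
         (xmax * img_width, ymax * img_height)],
        outline=(255, 0, 0),
        width=2,
    )

img = Image.new("RGB", (640, 480))
draw_tf_box(ImageDraw.Draw(img), (0.1, 0.2, 0.5, 0.6), 640, 480)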
BeegorMif/HTPC-Manager
lib/guessit/transfo/guess_release_group.py
21
3682
#!/usr/bin/env python2 # -*- coding: utf-8 -*- # # GuessIt - A library for guessing information from filenames # Copyright (c) 2012 Nicolas Wack <[email protected]> # # GuessIt is free software; you can redistribute it and/or modify it under # the terms of the Lesser GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # GuessIt is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Lesser GNU General Public License for more details. # # You should have received a copy of the Lesser GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from __future__ import unicode_literals from guessit.transfo import SingleNodeGuesser from guessit.patterns import prop_multi, compute_canonical_form, _dash, _psep import re import logging log = logging.getLogger(__name__) def get_patterns(property_name): return [ p.replace(_dash, _psep) for patterns in prop_multi[property_name].values() for p in patterns ] CODECS = get_patterns('videoCodec') FORMATS = get_patterns('format') VAPIS = get_patterns('videoApi') # RG names following a codec or format, with a potential space or dash inside the name GROUP_NAMES = [ r'(?P<videoCodec>' + codec + r')[ \.-](?P<releaseGroup>.+?([- \.].*?)??)[ \.]' for codec in CODECS ] GROUP_NAMES += [ r'(?P<format>' + fmt + r')[ \.-](?P<releaseGroup>.+?([- \.].*?)??)[ \.]' for fmt in FORMATS ] GROUP_NAMES += [ r'(?P<videoApi>' + api + r')[ \.-](?P<releaseGroup>.+?([- \.].*?)??)[ \.]' for api in VAPIS ] GROUP_NAMES2 = [ r'\.(?P<videoCodec>' + codec + r')-(?P<releaseGroup>.*?)(-(.*?))?[ \.]' for codec in CODECS ] GROUP_NAMES2 += [ r'\.(?P<format>' + fmt + r')-(?P<releaseGroup>.*?)(-(.*?))?[ \.]' for fmt in FORMATS ] GROUP_NAMES2 += [ r'\.(?P<videoApi>' + vapi + r')-(?P<releaseGroup>.*?)(-(.*?))?[ \.]' for vapi in VAPIS ] GROUP_NAMES = [ re.compile(r, re.IGNORECASE) for r in GROUP_NAMES ] GROUP_NAMES2 = [ re.compile(r, re.IGNORECASE) for r in GROUP_NAMES2 ] def adjust_metadata(md): return dict((property_name, compute_canonical_form(property_name, value) or value) for property_name, value in md.items()) def guess_release_group(string): # first try to see whether we have both a known codec and a known release group for rexp in GROUP_NAMES: match = rexp.search(string) while match: metadata = match.groupdict() # make sure this is an actual release group we caught release_group = (compute_canonical_form('releaseGroup', metadata['releaseGroup']) or compute_canonical_form('weakReleaseGroup', metadata['releaseGroup'])) if release_group: return adjust_metadata(metadata), (match.start(1), match.end(2)) # we didn't find anything conclusive, keep searching match = rexp.search(string, match.span()[0]+1) # pick anything as releaseGroup as long as we have a codec in front # this doesn't include a potential dash ('-') ending the release group # eg: [...].X264-HiS@SiLUHD-English.[...] for rexp in GROUP_NAMES2: match = rexp.search(string) if match: return adjust_metadata(match.groupdict()), (match.start(1), match.end(2)) return None, None def process(mtree): SingleNodeGuesser(guess_release_group, 0.8, log).process(mtree)
gpl-3.0
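The comment above GROUP_NAMES2 gives a concrete filename. A stripped-down, single-codec version of that regex family shows the capture-group mechanics; the pattern below is a simplification for illustration, not one of guessit's actual compiled patterns:

import re

pattern = re.compile(r'\.(?P<videoCodec>X264)-(?P<releaseGroup>.*?)[ \.]',
                     re.IGNORECASE)
m = pattern.search('Some.Show.720p.X264-HiS@SiLUHD-English.mkv')
print(m.group('videoCodec'))    # X264
print(m.group('releaseGroup'))  # HiS@SiLUHD-English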
iamaris/CMUAnalysis
Common/generateObjectTree.py
1
11728
import re import os objects = ['Photon', 'Electron', 'Muon', 'Jet', 'Vertex'] susyObjects = {'Photon': 'Photon', 'Electron': 'Electron', 'Muon': 'Muon', 'Jet': 'PFJet', 'Vertex': 'Vertex'} objectVars = file('ObjectVars.h') classPat = re.compile('^[ ]*class[ ]+([a-zA-Z0-9]+)Vars[ ]*{') cTorPat = re.compile('^[ ]*[a-zA-Z0-9]+Vars\([^,]+(,[ ]+Event.*|)\);') varPat = re.compile('^[ ]*((?:unsigned[ ]|)(?:bool|char|short|int|unsigned|long|float|double))[ ]+([a-zA-Z_][a-zA-Z0-9_]*);') useEvent = dict() varList = dict() obj = '' for line in objectVars: if '};' in line: obj = '' if obj: cTorMatch = cTorPat.match(line) if cTorMatch: useEvent[obj] = len(cTorMatch.group(1)) != 0 varMatch = varPat.match(line) if varMatch: varList[obj].append((varMatch.group(1), varMatch.group(2))) lineMatch = classPat.match(line) if lineMatch and lineMatch.group(1) in objects: obj = lineMatch.group(1) varList[obj] = [] objectVars.close() # GENERATE HEADER headerContent = '''/* Auto-generated header file */ #ifndef ObjectTree_h #define ObjectTree_h #include "ObjectVars.h" #include "TTree.h" #include "TString.h" namespace susy { unsigned const NMAX(512); ''' for obj in objects: headerContent += ''' class ''' + obj + '''VarsArray { public: ''' + obj + '''VarsArray() {} ~''' + obj + '''VarsArray() {} void setBranches(TTree&); void setAddress(TTree&); void push_back(''' + obj + '''Vars const&); void clear() { size = 0; } ''' + obj + '''Vars at(unsigned) const; unsigned size; ''' for (type, name) in varList[obj]: headerContent += ''' ''' + type + ' ' + name + '[NMAX];' headerContent += ''' }; ''' headerContent += ''' class ObjectTree { public: ObjectTree(); ~ObjectTree(); void setOutput(TString const&,''' for i in range(len(objects)): headerContent += ' bool = true' if i != len(objects) - 1: headerContent += ',' else: headerContent += ');' headerContent += ''' void setOutput(TTree&,''' for i in range(len(objects)): headerContent += ' bool = true' if i != len(objects) - 1: headerContent += ',' else: headerContent += ');' headerContent += ''' static void setBranchStatus(TTree&,''' for i in range(len(objects)): headerContent += ' bool = true' if i != len(objects) - 1: headerContent += ',' else: headerContent += ');' headerContent += ''' void initEvent(Event const&); void fill() { output_->Fill(); }''' for obj in objects: lowerName = obj.lower() headerContent += ''' void save(''' + obj + 'Vars const& _vars) { ' + lowerName + 'Array_.push_back(_vars); }' for obj in objects: lowerName = obj.lower() headerContent += ''' unsigned get''' + obj + 'Size() const { return ' + lowerName + 'Array_.size; }' for obj in objects: lowerName = obj.lower() headerContent += ''' ''' + obj + 'VarsArray const& get' + obj + 'Array() const { return ' + lowerName + 'Array_; }' headerContent += ''' private: void setBranches_(''' for i in range(len(objects)): headerContent += 'bool' if i != len(objects) - 1: headerContent += ', ' else: headerContent += ');' for obj in objects: lowerName = obj.lower() headerContent += ''' ''' + obj + '''VarsArray ''' + lowerName + '''Array_;''' headerContent += ''' unsigned runNumber_; unsigned lumiNumber_; unsigned eventNumber_; TTree* output_; bool ownOutput_; }; } #endif ''' headerFile = file('ObjectTree.h', 'w') headerFile.write(headerContent) headerFile.close() # GENERATE SRC cTors = dict() setBranches = dict() setAddress = dict() pushBack = dict() at = dict() for obj in objects: lowerName = obj.lower() cTorText = ''' ''' + obj + 'Vars::' + obj + '''Vars() :''' initList = '' for (type, name) in varList[obj]: initList 
+= ''' ''' + name + '(' if type == 'float' or type == 'double': initList += '0.' elif type == 'bool': initList += 'false' else: initList += '0' initList += '),' initList = initList.rstrip(',') cTorText += initList cTorText += ''' { } ''' cTors[obj] = cTorText setBranchText = ''' void ''' + obj + '''VarsArray::setBranches(TTree& _tree) { _tree.Branch("''' + lowerName + '.size", &size, "' + lowerName + '.size/i");' for (type, name) in varList[obj]: branch = ''' _tree.Branch("''' + lowerName + '.' + name + '", ' + name + ', "' + name + '[' + lowerName + '.size]/' if type == 'char': branch += 'B' elif type == 'unsigned char': branch += 'b' elif type == 'short': branch += 'S' elif type == 'unsigned short': branch += 's' elif type == 'int': branch += 'I' elif type == 'unsigned' or type == 'unsigned int': branch += 'i' elif type == 'long': branch += 'L' elif type == 'unsigned long': branch += 'l' elif type == 'float': branch += 'F' elif type == 'double': branch += 'D' elif type == 'bool': branch += 'O' branch += '");' setBranchText += branch setBranchText += ''' } ''' setBranches[obj] = setBranchText setAddressText = ''' void ''' + obj + '''VarsArray::setAddress(TTree& _tree) { std::vector<TString> notFound; _tree.SetBranchAddress("''' + lowerName + '.size", &size);' for (type, name) in varList[obj]: bName = lowerName + '.' + name setAddressText += ''' if(_tree.GetBranch("''' + bName + '")) _tree.SetBranchAddress("' + bName + '", ' + name + '''); else notFound.push_back("''' + bName + '");' setAddressText += ''' for(unsigned iN(0); iN != notFound.size(); ++iN) std::cerr << "Branch " << notFound[iN] << " not found in input" << std::endl; } ''' setAddress[obj] = setAddressText pushBackText = ''' void ''' + obj + 'VarsArray::push_back(' + obj + '''Vars const& _vars) { if(size == NMAX - 1) throw std::runtime_error("Too many ''' + obj + '''s"); ''' for (type, name) in varList[obj]: pushBackText += ''' ''' + name + '[size] = _vars.' 
+ name + ';' pushBackText += ''' ++size; } ''' pushBack[obj] = pushBackText atText = ''' ''' + obj + '''Vars ''' + obj + '''VarsArray::at(unsigned _pos) const { if(_pos >= size) throw std::runtime_error("''' + obj + '''Vars out-of-bounds"); ''' + obj + '''Vars vars; ''' for (type, name) in varList[obj]: atText += ''' vars.''' + name + ' = ' + name + '[_pos];' atText += ''' return vars; } ''' at[obj] = atText preamble = '#include "ObjectVars.h"\n' try: originalSrc = file('ObjectVars.cc', 'r') userDef = '' copy = False namespace = False for line in originalSrc: if 'namespace susy' in line: namespace = True if not namespace and 'ObjectVars.h' not in line and not re.match('^[ ]*/\*.*\*/[ ]*$', line): preamble += line if '/* START USER-DEFINED IMPLEMENTATION (DO NOT MODIFY THIS LINE) */' in line: copy = True if copy: userDef += line if '/* END USER-DEFINED IMPLEMENTATION (DO NOT MODIFY THIS LINE) */' in line: copy = False originalSrc.close() except IOError: userDef = '\n/* START USER-DEFINED IMPLEMENTATION (DO NOT MODIFY THIS LINE) */\n' for obj in objects: userDef += ''' void ''' + obj + '''Vars::set(''' + susyObjects[obj] + ' const&' if useEvent[obj]: userDef += ', Event const&' userDef += ''') { } /*static*/ void ''' + obj + '''Vars::setBranchStatus(TTree&) { } ''' userDef += '/* END USER-DEFINED IMPLEMENTATION (DO NOT MODIFY THIS LINE) */\n' # ObjectTree.cc objTreeContent = '''/* Auto-generated source file */ #include "ObjectTree.h" #include "TFile.h" #include <stdexcept> #include <iostream> namespace susy { ''' for obj in objects: objTreeContent += setBranches[obj] objTreeContent += setAddress[obj] objTreeContent += pushBack[obj] objTreeContent += at[obj] objTreeContent += ''' ObjectTree::ObjectTree() :''' for obj in objects: lowerName = obj.lower() objTreeContent += ''' ''' + lowerName + '''Array_(),''' objTreeContent += ''' runNumber_(0), lumiNumber_(0), eventNumber_(0), output_(0), ownOutput_(false) { } ObjectTree::~ObjectTree() { if(ownOutput_ && output_){ TFile* outFile(output_->GetCurrentFile()); outFile->cd(); output_->Write(); delete outFile; } } void ObjectTree::setOutput(TString const& _fileName''' for obj in objects: objTreeContent += ', bool _set' + obj + '/* = true*/' objTreeContent += ''') { ownOutput_ = true; TFile::Open(_fileName, "recreate"); output_ = new TTree("objectVars", "Object ID variables"); setBranches_(''' for obj in objects: objTreeContent += '_set' + obj + ', ' objTreeContent = objTreeContent.rstrip(', ') objTreeContent += '''); } void ObjectTree::setOutput(TTree& _tree''' for obj in objects: objTreeContent += ', bool _set' + obj + '/* = true*/' objTreeContent += ''') { output_ = &_tree; setBranches_(''' for obj in objects: objTreeContent += '_set' + obj + ', ' objTreeContent = objTreeContent.rstrip(', ') objTreeContent += '''); } /*static*/ void ObjectTree::setBranchStatus(TTree& _input''' for obj in objects: objTreeContent += ', bool _set' + obj + '/* = true*/' objTreeContent += ''') { _input.SetBranchStatus("runNumber", 1); _input.SetBranchStatus("luminosityBlockNumber", 1); _input.SetBranchStatus("eventNumber", 1); ''' for obj in objects: objTreeContent += ''' if(_set''' + obj + ') ' + obj + 'Vars::setBranchStatus(_input);' objTreeContent += ''' } #ifdef STANDALONE void ObjectTree::initEvent(Event const&) { runNumber_ = 0; lumiNumber_ = 0; eventNumber_ = 0; #else void ObjectTree::initEvent(Event const& _event) { runNumber_ = _event.runNumber; lumiNumber_ = _event.luminosityBlockNumber; eventNumber_ = _event.eventNumber; #endif''' for obj in objects: objTreeContent +=
''' ''' + obj.lower() + 'Array_.clear();' objTreeContent += ''' } void ObjectTree::setBranches_(''' for obj in objects: objTreeContent += 'bool _set' + obj + ', ' objTreeContent = objTreeContent.rstrip(', ') + ')' objTreeContent += ''' { output_->Branch("runNumber", &runNumber_, "runNumber/i"); output_->Branch("lumiNumber", &lumiNumber_, "lumiNumber/i"); output_->Branch("eventNumber", &eventNumber_, "eventNumber/i"); ''' for obj in objects: objTreeContent += ''' if(_set''' + obj + ') ' + obj.lower() + 'Array_.setBranches(*output_);' objTreeContent += ''' } ''' objTreeContent += '}\n' objTreeFile = file('ObjectTree.cc', 'w') objTreeFile.write(objTreeContent) objTreeFile.close() # ObjectVars.cc objVarsContent = '''/* Partially auto-generated source file - edit where indicated */ /* Add necessary inclusions below */ ''' + preamble + ''' namespace susy { ''' for obj in objects: objVarsContent += cTors[obj] objVarsContent += '\n' objVarsContent += userDef objVarsContent += ''' } ''' objVarsFile = file('ObjectVars.cc', 'w') objVarsFile.write(objVarsContent) objVarsFile.close()
apache-2.0
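The generator's parsing step is a single regular expression, varPat, applied to each declaration line of ObjectVars.h. A quick check of what it captures; the member names below are hypothetical examples, not taken from the real header:

import re

varPat = re.compile(r'^[ ]*((?:unsigned[ ]|)'
                    r'(?:bool|char|short|int|unsigned|long|float|double))'
                    r'[ ]+([a-zA-Z_][a-zA-Z0-9_]*);')

for decl in ('  float pt;', '  unsigned int nClusters;', '  bool isGood;'):
    m = varPat.match(decl)
    print(m.group(1), '->', m.group(2))
# float -> pt
# unsigned int -> nClusters
# bool -> isGood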
riyer15/python_koans
python3/koans/about_scoring_project.py
107
2207
#!/usr/bin/env python # -*- coding: utf-8 -*- from runner.koan import * # Greed is a dice game where you roll up to five dice to accumulate # points. The following "score" function will be used calculate the # score of a single roll of the dice. # # A greed roll is scored as follows: # # * A set of three ones is 1000 points # # * A set of three numbers (other than ones) is worth 100 times the # number. (e.g. three fives is 500 points). # # * A one (that is not part of a set of three) is worth 100 points. # # * A five (that is not part of a set of three) is worth 50 points. # # * Everything else is worth 0 points. # # # Examples: # # score([1,1,1,5,1]) => 1150 points # score([2,3,4,6,2]) => 0 points # score([3,4,5,3,3]) => 350 points # score([1,5,1,2,4]) => 250 points # # More scoring examples are given in the tests below: # # Your goal is to write the score method. def score(dice): # You need to write this method pass class AboutScoringProject(Koan): def test_score_of_an_empty_list_is_zero(self): self.assertEqual(0, score([])) def test_score_of_a_single_roll_of_5_is_50(self): self.assertEqual(50, score([5])) def test_score_of_a_single_roll_of_1_is_100(self): self.assertEqual(100, score([1])) def test_score_of_multiple_1s_and_5s_is_the_sum_of_individual_scores(self): self.assertEqual(300, score([1,5,5,1])) def test_score_of_single_2s_3s_4s_and_6s_are_zero(self): self.assertEqual(0, score([2,3,4,6])) def test_score_of_a_triple_1_is_1000(self): self.assertEqual(1000, score([1,1,1])) def test_score_of_other_triples_is_100x(self): self.assertEqual(200, score([2,2,2])) self.assertEqual(300, score([3,3,3])) self.assertEqual(400, score([4,4,4])) self.assertEqual(500, score([5,5,5])) self.assertEqual(600, score([6,6,6])) def test_score_of_mixed_is_sum(self): self.assertEqual(250, score([2,5,2,2,3])) self.assertEqual(550, score([5,5,5,5])) self.assertEqual(1150, score([1,1,1,5,1])) def test_ones_not_left_out(self): self.assertEqual(300, score([1,2,2,2])) self.assertEqual(350, score([1,5,2,2,2]))
mit
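The koan deliberately leaves score() as a stub for the learner. For reference, here is one implementation that satisfies every test above — a possible solution, not the koan's official answer:

def score(dice):
    total = 0
    for value in range(1, 7):
        count = dice.count(value)
        if count >= 3:  # a set of three
            total += 1000 if value == 1 else value * 100
            count -= 3
        if value == 1:    # ones outside a set
            total += count * 100
        elif value == 5:  # fives outside a set
            total += count * 50
    return total

assert score([1, 1, 1, 5, 1]) == 1150
assert score([3, 4, 5, 3, 3]) == 350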
victorbriz/rethinkdb
scripts/ui-tests.py
50
3779
#!/usr/bin/env python # Copyright 2010-2012 RethinkDB, all rights reserved. import os, sys, subprocess, argparse from termcolor import colored, cprint import time tests = [ 'add-a-namespace', 'add-a-datacenter', 'view-dashboard', ] git_root = subprocess.Popen(['git', 'rev-parse', '--show-toplevel'], stdout=subprocess.PIPE).communicate()[0].rstrip('\r\n') test_file_dir = os.path.join(git_root, 'test/ui_test/') cwd = os.getcwd() # Define and parse command-line arguments parser = argparse.ArgumentParser(description='Run a set of UI tests using CasperJS / PhantomJS.') parser.add_argument('tests', nargs='*', help='List of tests to run. Specify \'all\' to run all tests.') parser.add_argument('-p','--rdb-port', nargs='?', dest='rdb_port', default='6001', help='Port of the RethinkDB server to connect to (default is 6001).') parser.add_argument('-i','--output-images', nargs='?', dest='image_output_directory', const='./casper-results', help='Include if images should be scraped and saved. Optionally specify the output directory (default is ./casper-results/).') parser.add_argument('-l','--list-tests', action='store_true', help='List available tests to run.') parser.add_argument('-r','--output-results', nargs='?', dest='result_output_directory', const='./casper-results', help='Include if test results should be saved. Optionally specify the output directory (default is ./casper-results/).') args = parser.parse_args() def print_available_tests(): print 'Available tests:' print '\t- all: run all of the following tests' for test in tests: print '\t- ' + test if args.list_tests: print_available_tests() exit(0) if len(args.tests) < 1: parser.print_usage() print '\nNo test specified.', print_available_tests() exit(1) # Prepare the list of tests to process; if 'all' was one of the specified tests then process all tests if 'all' in args.tests: test_list = tests else: test_list = args.tests # Process each test name specified on the command line successful_tests = 0 os.chdir(test_file_dir) for test_name in test_list: # Look for a matching test among known tests casper_script = os.path.join(test_file_dir, test_name + '.coffee') try: with open(casper_script) as f: pass except IOError as e: print "No test script found for CasperJS test '%s'." % test_name continue # Build command with arguments for casperjs test cl = ['casperjs', '--rdb-server=http://localhost:' + args.rdb_port + '/', casper_script] # If the option to scrape images was specified, add it to the casperjs argument list if args.image_output_directory: image_dir = os.path.abspath(args.image_output_directory) cl.extend(['--images=' + image_dir]) # Execute casperjs and pretty-print its output process = subprocess.Popen(cl, stdout=subprocess.PIPE) stdout = process.stdout.readlines() for i, line in enumerate(stdout): cprint('[%s]' % test_name, attrs=['bold'], end=' ') print line.rstrip('\n') # If the option to save results was specified, save stdout to a file if args.result_output_directory: result_dir = os.path.abspath(args.result_output_directory) result_filename = "casper-result_%s" % test_name result_file = open(os.path.join(result_dir, result_filename), 'w') for line in stdout: result_file.write(line) result_file.close() # Check the exit code of the process # 0: casper test passed # 1: casper test failed process.poll() if process.returncode == 0: successful_tests += 1 print # Print test suite summary cprint(" %d of %d tests ran successfully. " % (successful_tests, len(test_list)), attrs=['reverse'])
agpl-3.0
jakesyl/androguard
androguard/core/analysis/sign.py
38
13670
# This file is part of Androguard. # # Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr> # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from androguard.core.analysis.analysis import TAINTED_PACKAGE_CREATE, TAINTED_PACKAGE_CALL from androguard.core.bytecodes import dvm TAINTED_PACKAGE_INTERNAL_CALL = 2 FIELD_ACCESS = { "R" : 0, "W" : 1 } PACKAGE_ACCESS = { TAINTED_PACKAGE_CREATE : 0, TAINTED_PACKAGE_CALL : 1, TAINTED_PACKAGE_INTERNAL_CALL : 2 } class Sign : def __init__(self) : self.levels = {} self.hlevels = [] def add(self, level, value) : self.levels[ level ] = value self.hlevels.append( level ) def get_level(self, l) : return self.levels[ "L%d" % l ] def get_string(self) : buff = "" for i in self.hlevels : buff += self.levels[ i ] return buff def get_list(self) : return self.levels[ "sequencebb" ] class Signature : def __init__(self, vmx) : self.vmx = vmx self.tainted_packages = self.vmx.get_tainted_packages() self.tainted_variables = self.vmx.get_tainted_variables() self._cached_signatures = {} self._cached_fields = {} self._cached_packages = {} self._global_cached = {} self.levels = { # Classical method signature with basic blocks, strings, fields, packages "L0" : { 0 : ( "_get_strings_a", "_get_fields_a", "_get_packages_a" ), 1 : ( "_get_strings_pa", "_get_fields_a", "_get_packages_a" ), 2 : ( "_get_strings_a", "_get_fields_a", "_get_packages_pa_1" ), 3 : ( "_get_strings_a", "_get_fields_a", "_get_packages_pa_2" ), }, # strings "L1" : [ "_get_strings_a1" ], # exceptions "L2" : [ "_get_exceptions" ], # fill array data "L3" : [ "_get_fill_array_data" ], } self.classes_names = None self._init_caches() def _get_method_info(self, m) : m1 = m.get_method() return "%s-%s-%s" % (m1.get_class_name(), m1.get_name(), m1.get_descriptor()) def _get_sequence_bb(self, analysis_method) : l = [] for i in analysis_method.basic_blocks.get() : buff = "" instructions = [j for j in i.get_instructions()] if len(instructions) > 5 : for ins in instructions : buff += ins.get_name() if buff != "" : l.append( buff ) return l def _get_hex(self, analysis_method) : code = analysis_method.get_method().get_code() if code == None : return "" buff = "" for i in code.get_bc().get_instructions() : buff += dvm.clean_name_instruction( i ) buff += dvm.static_operand_instruction( i ) return buff def _get_bb(self, analysis_method, functions, options) : bbs = [] for b in analysis_method.basic_blocks.get() : l = [] l.append( (b.start, "B") ) l.append( (b.start, "[") ) internal = [] op_value = b.get_last().get_op_value() # return if op_value >= 0x0e and op_value <= 0x11 : internal.append( (b.end-1, "R") ) # if elif op_value >= 0x32 and op_value <= 0x3d : internal.append( (b.end-1, "I") ) # goto elif op_value >= 0x28 and op_value <= 0x2a : internal.append( (b.end-1, "G") ) # sparse or packed switch elif op_value >= 0x2b and op_value <= 0x2c : internal.append( (b.end-1, "G") ) for f in functions : try : internal.extend( getattr( self, f )( analysis_method, options ) ) except TypeError : internal.extend( getattr( self, 
f )( analysis_method ) ) internal.sort() for i in internal : if i[0] >= b.start and i[0] < b.end : l.append( i ) del internal l.append( (b.end, "]") ) bbs.append( ''.join(i[1] for i in l) ) return bbs def _init_caches(self) : if self._cached_fields == {} : for f_t, f in self.tainted_variables.get_fields() : self._cached_fields[ f ] = f_t.get_paths_length() n = 0 for f in sorted( self._cached_fields ) : self._cached_fields[ f ] = n n += 1 if self._cached_packages == {} : for m_t, m in self.tainted_packages.get_packages() : self._cached_packages[ m ] = m_t.get_paths_length() n = 0 for m in sorted( self._cached_packages ) : self._cached_packages[ m ] = n n += 1 def _get_fill_array_data(self, analysis_method) : buff = "" for b in analysis_method.basic_blocks.get() : for i in b.get_instructions() : if i.get_name() == "FILL-ARRAY-DATA" : buff_tmp = i.get_operands() for j in range(0, len(buff_tmp)) : buff += "\\x%02x" % ord( buff_tmp[j] ) return buff def _get_exceptions(self, analysis_method) : buff = "" method = analysis_method.get_method() code = method.get_code() if code == None or code.get_tries_size() <= 0 : return buff handler_catch_list = code.get_handlers() for handler_catch in handler_catch_list.get_list() : for handler in handler_catch.get_handlers() : buff += analysis_method.get_vm().get_cm_type( handler.get_type_idx() ) return buff def _get_strings_a1(self, analysis_method) : buff = "" strings_method = self.tainted_variables.get_strings_by_method( analysis_method.get_method() ) for s in strings_method : for path in strings_method[s] : buff += s.replace('\n', ' ') return buff def _get_strings_pa(self, analysis_method) : l = [] strings_method = self.tainted_variables.get_strings_by_method( analysis_method.get_method() ) for s in strings_method : for path in strings_method[s] : l.append( ( path[1], "S%d" % len(s) ) ) return l def _get_strings_a(self, analysis_method) : key = "SA-%s" % self._get_method_info(analysis_method) if key in self._global_cached : return self._global_cached[ key ] l = [] strings_method = self.tainted_variables.get_strings_by_method( analysis_method.get_method() ) for s in strings_method : for path in strings_method[s] : l.append( ( path[1], "S") ) self._global_cached[ key ] = l return l def _get_fields_a(self, analysis_method) : key = "FA-%s" % self._get_method_info(analysis_method) if key in self._global_cached : return self._global_cached[ key ] fields_method = self.tainted_variables.get_fields_by_method( analysis_method.get_method() ) l = [] for f in fields_method : for path in fields_method[ f ] : l.append( (path[1], "F%d" % FIELD_ACCESS[ path[0] ]) ) self._global_cached[ key ] = l return l def _get_packages_a(self, analysis_method) : packages_method = self.tainted_packages.get_packages_by_method( analysis_method.get_method() ) l = [] for m in packages_method : for path in packages_method[ m ] : l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) ) return l def _get_packages(self, analysis_method, include_packages) : l = self._get_packages_pa_1( analysis_method, include_packages ) return "".join([ i[1] for i in l ]) def _get_packages_pa_1(self, analysis_method, include_packages) : key = "PA1-%s-%s" % (self._get_method_info(analysis_method), include_packages) if key in self._global_cached : return self._global_cached[ key ] packages_method = self.tainted_packages.get_packages_by_method( analysis_method.get_method() ) if self.classes_names == None : self.classes_names = analysis_method.get_vm().get_classes_names() l = [] for m in 
packages_method : for path in packages_method[ m ] : present = False for i in include_packages : if m.find(i) == 0 : present = True break if path.get_access_flag() == 1 : dst_class_name, dst_method_name, dst_descriptor = path.get_dst( analysis_method.get_vm().get_class_manager() ) if dst_class_name in self.classes_names : l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ 2 ]) ) ) else : if present == True : l.append( (path.get_idx(), "P%s{%s%s%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], dst_class_name, dst_method_name, dst_descriptor ) ) ) else : l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) ) else : if present == True : l.append( (path.get_idx(), "P%s{%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], m) ) ) else : l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) ) self._global_cached[ key ] = l return l def _get_packages_pa_2(self, analysis_method, include_packages) : packages_method = self.tainted_packages.get_packages_by_method( analysis_method.get_method() ) l = [] for m in packages_method : for path in packages_method[ m ] : present = False for i in include_packages : if m.find(i) == 0 : present = True break if present == True : l.append( (path.get_idx(), "P%s" % (PACKAGE_ACCESS[ path.get_access_flag() ]) ) ) continue if path.get_access_flag() == 1 : dst_class_name, dst_method_name, dst_descriptor = path.get_dst( analysis_method.get_vm().get_class_manager() ) l.append( (path.get_idx(), "P%s{%s%s%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], dst_class_name, dst_method_name, dst_descriptor ) ) ) else : l.append( (path.get_idx(), "P%s{%s}" % (PACKAGE_ACCESS[ path.get_access_flag() ], m) ) ) return l def get_method(self, analysis_method, signature_type, signature_arguments={}) : key = "%s-%s-%s" % (self._get_method_info(analysis_method), signature_type, signature_arguments) if key in self._cached_signatures : return self._cached_signatures[ key ] s = Sign() #print signature_type, signature_arguments for i in signature_type.split(":") : # print i, signature_arguments[ i ] if i == "L0" : _type = self.levels[ i ][ signature_arguments[ i ][ "type" ] ] try : _arguments = signature_arguments[ i ][ "arguments" ] except KeyError : _arguments = [] value = self._get_bb( analysis_method, _type, _arguments ) s.add( i, ''.join(z for z in value) ) elif i == "L4" : try : _arguments = signature_arguments[ i ][ "arguments" ] except KeyError : _arguments = [] value = self._get_packages( analysis_method, _arguments ) s.add( i , value ) elif i == "hex" : value = self._get_hex( analysis_method ) s.add( i, value ) elif i == "sequencebb" : _type = ('_get_strings_a', '_get_fields_a', '_get_packages_pa_1') _arguments = ['Landroid', 'Ljava'] #value = self._get_bb( analysis_method, _type, _arguments ) #s.add( i, value ) value = self._get_sequence_bb( analysis_method ) s.add( i, value ) else : for f in self.levels[ i ] : value = getattr( self, f )( analysis_method ) s.add( i, value ) self._cached_signatures[ key ] = s return s
apache-2.0
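
A minimal driver sketch for the Signature class above. Only Signature, get_method, and the "L0"/"L1" grammar come from the file itself; the loader names (APK, DalvikVMFormat, VMAnalysis, get_methods) are assumptions based on androguard's 2012-era API and this file's own imports, not verified against this exact release.

# Hypothetical usage sketch -- loader names are assumptions.
from androguard.core.bytecodes.apk import APK              # assumed import path
from androguard.core.bytecodes.dvm import DalvikVMFormat   # assumed import path
from androguard.core.analysis.analysis import VMAnalysis   # assumed class name

a = APK("app.apk")                      # hypothetical input APK
d = DalvikVMFormat(a.get_dex())
dx = VMAnalysis(d)
sig = Signature(dx)
for method in dx.get_methods():         # assumed iterator over method analyses
    # "L0" needs a "type" index into its variants; "L1" takes no arguments.
    s = sig.get_method(method, "L0:L1", {"L0": {"type": 0}})
    print s.get_string()
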
dya2/python-for-android
python-modules/twisted/twisted/test/test_text.py
49
5450
# Copyright (c) 2001-2010 Twisted Matrix Laboratories. # See LICENSE for details. from twisted.trial import unittest from twisted.python import text import string from cStringIO import StringIO sampleText = \ """Every attempt to employ mathematical methods in the study of chemical questions must be considered profoundly irrational and contrary to the spirit of chemistry ... If mathematical analysis should ever hold a prominent place in chemistry - an aberration which is happily almost impossible - it would occasion a rapid and widespread degeneration of that science. -- Auguste Comte, Philosophie Positive, Paris, 1838 """ lineWidth = 72 def set_lineWidth(n): global lineWidth lineWidth = n class WrapTest(unittest.TestCase): def setUp(self): self.sampleSplitText = string.split(sampleText) self.output = text.wordWrap(sampleText, lineWidth) def test_wordCount(self): """Compare the number of words.""" words = [] for line in self.output: words.extend(string.split(line)) wordCount = len(words) sampleTextWordCount = len(self.sampleSplitText) self.failUnlessEqual(wordCount, sampleTextWordCount) def test_wordMatch(self): """Compare the lists of words.""" words = [] for line in self.output: words.extend(string.split(line)) # Using failUnlessEqual here prints out some # rather too long lists. self.failUnless(self.sampleSplitText == words) def test_lineLength(self): """Check the length of the lines.""" failures = [] for line in self.output: if not len(line) <= lineWidth: failures.append(len(line)) if failures: self.fail("%d of %d lines were too long.\n" "%d < %s" % (len(failures), len(self.output), lineWidth, failures)) class SplitTest(unittest.TestCase): """Tests for text.splitQuoted()""" def test_oneWord(self): """Splitting strings with one-word phrases.""" s = 'This code "works."' r = text.splitQuoted(s) self.failUnlessEqual(['This', 'code', 'works.'], r) def test_multiWord(self): s = 'The "hairy monkey" likes pie.' r = text.splitQuoted(s) self.failUnlessEqual(['The', 'hairy monkey', 'likes', 'pie.'], r) # Some of the many tests that would fail: #def test_preserveWhitespace(self): # phrase = '"MANY SPACES"' # s = 'With %s between.' 
% (phrase,) # r = text.splitQuoted(s) # self.failUnlessEqual(['With', phrase, 'between.'], r) #def test_escapedSpace(self): # s = r"One\ Phrase" # r = text.splitQuoted(s) # self.failUnlessEqual(["One Phrase"], r) class StrFileTest(unittest.TestCase): def setUp(self): self.io = StringIO("this is a test string") def tearDown(self): pass def test_1_f(self): self.assertEquals(False, text.strFile("x", self.io)) def test_1_1(self): self.assertEquals(True, text.strFile("t", self.io)) def test_1_2(self): self.assertEquals(True, text.strFile("h", self.io)) def test_1_3(self): self.assertEquals(True, text.strFile("i", self.io)) def test_1_4(self): self.assertEquals(True, text.strFile("s", self.io)) def test_1_5(self): self.assertEquals(True, text.strFile("n", self.io)) def test_1_6(self): self.assertEquals(True, text.strFile("g", self.io)) def test_3_1(self): self.assertEquals(True, text.strFile("thi", self.io)) def test_3_2(self): self.assertEquals(True, text.strFile("his", self.io)) def test_3_3(self): self.assertEquals(True, text.strFile("is ", self.io)) def test_3_4(self): self.assertEquals(True, text.strFile("ing", self.io)) def test_3_f(self): self.assertEquals(False, text.strFile("bla", self.io)) def test_large_1(self): self.assertEquals(True, text.strFile("this is a test", self.io)) def test_large_2(self): self.assertEquals(True, text.strFile("is a test string", self.io)) def test_large_f(self): self.assertEquals(False, text.strFile("ds jhfsa k fdas", self.io)) def test_overlarge_f(self): self.assertEquals(False, text.strFile("djhsakj dhsa fkhsa s,mdbnfsauiw bndasdf hreew", self.io)) def test_self(self): self.assertEquals(True, text.strFile("this is a test string", self.io)) def test_insensitive(self): self.assertEquals(True, text.strFile("ThIs is A test STRING", self.io, False)) class DeprecationTest(unittest.TestCase): """ Tests for deprecations in L{twisted.python.text} """ def test_docstringLStrip(self): """ L{docstringLStrip} is deprecated as of 10.2.0 """ text.docstringLStrip("") warningsShown = self.flushWarnings([self.test_docstringLStrip]) self.assertEquals(1, len(warningsShown)) self.assertIdentical(warningsShown[0]['category'], DeprecationWarning) self.assertEquals(warningsShown[0]['message'], "twisted.python.text.docstringLStrip was " "deprecated in Twisted 10.2.0: Please use " "inspect.getdoc instead.") testCases = [WrapTest, SplitTest, StrFileTest]
apache-2.0
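
For reference, a standalone sketch of the two APIs exercised above (assumes Twisted is installed; the splitQuoted output is taken directly from the assertion in SplitTest.test_multiWord).

from twisted.python import text

print text.splitQuoted('The "hairy monkey" likes pie.')
# ['The', 'hairy monkey', 'likes', 'pie.']

# wordWrap(string, width) greedily wraps words into lines of at most `width`.
for line in text.wordWrap("a few words that will be wrapped to a narrow width", 20):
    print line
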
kambysese/mne-python
mne/connectivity/utils.py
15
2957
# Authors: Martin Luessi <[email protected]>
#
# License: BSD (3-clause)

import numpy as np


def check_indices(indices):
    """Check indices parameter."""
    if not isinstance(indices, tuple) or len(indices) != 2:
        raise ValueError('indices must be a tuple of length 2')

    if len(indices[0]) != len(indices[1]):
        raise ValueError('Index arrays indices[0] and indices[1] must '
                         'have the same length')

    return indices


def seed_target_indices(seeds, targets):
    """Generate indices parameter for seed based connectivity analysis.

    Parameters
    ----------
    seeds : array of int | int
        Seed indices.
    targets : array of int | int
        Indices of signals for which to compute connectivity.

    Returns
    -------
    indices : tuple of array
        The indices parameter used for connectivity computation.
    """
    # make them arrays
    seeds = np.asarray((seeds,)).ravel()
    targets = np.asarray((targets,)).ravel()

    n_seeds = len(seeds)
    n_targets = len(targets)

    indices = (np.concatenate([np.tile(i, n_targets) for i in seeds]),
               np.tile(targets, n_seeds))

    return indices


def degree(connectivity, threshold_prop=0.2):
    """Compute the undirected degree of a connectivity matrix.

    Parameters
    ----------
    connectivity : ndarray, shape (n_nodes, n_nodes)
        The connectivity matrix.
    threshold_prop : float
        The proportion of edges to keep in the graph before
        computing the degree. The value should be between 0
        and 1.

    Returns
    -------
    degree : ndarray, shape (n_nodes,)
        The computed degree.

    Notes
    -----
    During thresholding, the symmetry of the connectivity matrix is
    auto-detected based on :func:`numpy.allclose` of it with its transpose.
    """
    connectivity = np.array(connectivity)
    if connectivity.ndim != 2 or \
            connectivity.shape[0] != connectivity.shape[1]:
        raise ValueError('connectivity must have shape (n_nodes, n_nodes), '
                         'got %s' % (connectivity.shape,))
    n_nodes = len(connectivity)
    if np.allclose(connectivity, connectivity.T):
        split = 2.
        connectivity[np.tril_indices(n_nodes)] = 0
    else:
        split = 1.
    threshold_prop = float(threshold_prop)
    if not 0 < threshold_prop <= 1:
        raise ValueError('threshold_prop must satisfy 0 < threshold_prop <= 1, '
                         'got %s' % (threshold_prop,))
    degree = connectivity.ravel()  # no need to copy because np.array does
    degree[::n_nodes + 1] = 0.
    n_keep = int(round((degree.size - len(connectivity)) *
                       threshold_prop / split))
    degree[np.argsort(degree)[:-n_keep]] = 0
    degree.shape = connectivity.shape
    if split == 2:
        degree += degree.T  # normally unsafe, but we know where our zeros are
    degree = np.sum(degree > 0, axis=0)
    return degree
bsd-3-clause
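
A small sketch of seed_target_indices from the module above; the result follows directly from the concatenate/tile logic in the code (the `mne.connectivity` re-export path is an assumption for this MNE version).

from mne.connectivity import seed_target_indices  # assumed public re-export

indices = seed_target_indices(seeds=[0, 1], targets=[2, 3, 4])
print indices[0]  # [0 0 0 1 1 1] -- each seed repeated once per target
print indices[1]  # [2 3 4 2 3 4] -- targets tiled once per seed
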
irisfeng/CodeScanner
SZQRCodeViewController/Pods/AVOSCloudCrashReporting/Breakpad/src/tools/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py
216
1427
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Verify that relinking a solib doesn't relink a dependent executable if the
solib's public API hasn't changed.
"""

import os
import sys
import TestCommon
import TestGyp

# NOTE(fischman): This test will not work with other generators because the
# API-hash-based-mtime-preservation optimization is only implemented in
# ninja.py. It could be extended to the make.py generator as well pretty
# easily, probably.
# (also, it tests ninja-specific out paths, which would have to be generalized
# if this was extended to other generators).
test = TestGyp.TestGyp(formats=['ninja'])

test.run_gyp('solibs_avoid_relinking.gyp')

# Build the executable, grab its timestamp, touch the solib's source, rebuild
# executable, ensure timestamp hasn't changed.
test.build('solibs_avoid_relinking.gyp', 'b')
test.built_file_must_exist('b' + TestCommon.exe_suffix)
pre_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
os.utime(os.path.join(test.workdir, 'solib.cc'),
         (pre_stat.st_atime, pre_stat.st_mtime + 100))
test.sleep()
test.build('solibs_avoid_relinking.gyp', 'b')
post_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))

if pre_stat.st_mtime != post_stat.st_mtime:
  test.fail_test()
else:
  test.pass_test()
mit
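
The mtime-bump trick the test relies on, in isolation (a sketch; 'solib.cc' is the test's own fixture name and is assumed to exist in the current directory):

import os

st = os.stat("solib.cc")
# Touch only the modification time, 100 seconds into the future.
os.utime("solib.cc", (st.st_atime, st.st_mtime + 100))
# After rebuilding, the test asserts the executable's st_mtime is unchanged,
# i.e. ninja skipped the relink because the solib's public API hash matched.
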
spisneha25/django
django/views/i18n.py
82
11102
import gettext as gettext_module import importlib import json import os from django import http from django.apps import apps from django.conf import settings from django.core.urlresolvers import translate_url from django.template import Context, Engine from django.utils import six from django.utils._os import upath from django.utils.encoding import smart_text from django.utils.formats import get_format, get_format_modules from django.utils.http import is_safe_url from django.utils.translation import ( LANGUAGE_SESSION_KEY, check_for_language, get_language, to_locale, ) def set_language(request): """ Redirect to a given url while setting the chosen language in the session or cookie. The url and the language code need to be specified in the request parameters. Since this view changes how the user will see the rest of the site, it must only be accessed as a POST request. If called as a GET request, it will redirect to the page in the request (the 'next' parameter) without changing any state. """ next = request.POST.get('next', request.GET.get('next')) if not is_safe_url(url=next, host=request.get_host()): next = request.META.get('HTTP_REFERER') if not is_safe_url(url=next, host=request.get_host()): next = '/' response = http.HttpResponseRedirect(next) if request.method == 'POST': lang_code = request.POST.get('language') if lang_code and check_for_language(lang_code): next_trans = translate_url(next, lang_code) if next_trans != next: response = http.HttpResponseRedirect(next_trans) if hasattr(request, 'session'): request.session[LANGUAGE_SESSION_KEY] = lang_code else: response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang_code, max_age=settings.LANGUAGE_COOKIE_AGE, path=settings.LANGUAGE_COOKIE_PATH, domain=settings.LANGUAGE_COOKIE_DOMAIN) return response def get_formats(): """ Returns all formats strings required for i18n to work """ FORMAT_SETTINGS = ( 'DATE_FORMAT', 'DATETIME_FORMAT', 'TIME_FORMAT', 'YEAR_MONTH_FORMAT', 'MONTH_DAY_FORMAT', 'SHORT_DATE_FORMAT', 'SHORT_DATETIME_FORMAT', 'FIRST_DAY_OF_WEEK', 'DECIMAL_SEPARATOR', 'THOUSAND_SEPARATOR', 'NUMBER_GROUPING', 'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS' ) result = {} for module in [settings] + get_format_modules(reverse=True): for attr in FORMAT_SETTINGS: result[attr] = get_format(attr) formats = {} for k, v in result.items(): if isinstance(v, (six.string_types, int)): formats[k] = smart_text(v) elif isinstance(v, (tuple, list)): formats[k] = [smart_text(value) for value in v] return formats js_catalog_template = r""" {% autoescape off %} (function(globals) { var django = globals.django || (globals.django = {}); {% if plural %} django.pluralidx = function(n) { var v={{ plural }}; if (typeof(v) == 'boolean') { return v ? 1 : 0; } else { return v; } }; {% else %} django.pluralidx = function(count) { return (count == 1) ? 0 : 1; }; {% endif %} /* gettext library */ django.catalog = django.catalog || {}; {% if catalog_str %} var newcatalog = {{ catalog_str }}; for (var key in newcatalog) { django.catalog[key] = newcatalog[key]; } {% endif %} if (!django.jsi18n_initialized) { django.gettext = function(msgid) { var value = django.catalog[msgid]; if (typeof(value) == 'undefined') { return msgid; } else { return (typeof(value) == 'string') ? value : value[0]; } }; django.ngettext = function(singular, plural, count) { var value = django.catalog[singular]; if (typeof(value) == 'undefined') { return (count == 1) ? 
singular : plural; } else { return value[django.pluralidx(count)]; } }; django.gettext_noop = function(msgid) { return msgid; }; django.pgettext = function(context, msgid) { var value = django.gettext(context + '\x04' + msgid); if (value.indexOf('\x04') != -1) { value = msgid; } return value; }; django.npgettext = function(context, singular, plural, count) { var value = django.ngettext(context + '\x04' + singular, context + '\x04' + plural, count); if (value.indexOf('\x04') != -1) { value = django.ngettext(singular, plural, count); } return value; }; django.interpolate = function(fmt, obj, named) { if (named) { return fmt.replace(/%\(\w+\)s/g, function(match){return String(obj[match.slice(2,-2)])}); } else { return fmt.replace(/%s/g, function(match){return String(obj.shift())}); } }; /* formatting library */ django.formats = {{ formats_str }}; django.get_format = function(format_type) { var value = django.formats[format_type]; if (typeof(value) == 'undefined') { return format_type; } else { return value; } }; /* add to global namespace */ globals.pluralidx = django.pluralidx; globals.gettext = django.gettext; globals.ngettext = django.ngettext; globals.gettext_noop = django.gettext_noop; globals.pgettext = django.pgettext; globals.npgettext = django.npgettext; globals.interpolate = django.interpolate; globals.get_format = django.get_format; django.jsi18n_initialized = true; } }(this)); {% endautoescape %} """ def render_javascript_catalog(catalog=None, plural=None): template = Engine().from_string(js_catalog_template) indent = lambda s: s.replace('\n', '\n ') context = Context({ 'catalog_str': indent(json.dumps( catalog, sort_keys=True, indent=2)) if catalog else None, 'formats_str': indent(json.dumps( get_formats(), sort_keys=True, indent=2)), 'plural': plural, }) return http.HttpResponse(template.render(context), 'text/javascript') def get_javascript_catalog(locale, domain, packages): default_locale = to_locale(settings.LANGUAGE_CODE) app_configs = apps.get_app_configs() allowable_packages = set(app_config.name for app_config in app_configs) allowable_packages.add('django.conf') packages = [p for p in packages if p in allowable_packages] t = {} paths = [] en_selected = locale.startswith('en') en_catalog_missing = True # paths of requested packages for package in packages: p = importlib.import_module(package) path = os.path.join(os.path.dirname(upath(p.__file__)), 'locale') paths.append(path) # add the filesystem paths listed in the LOCALE_PATHS setting paths.extend(reversed(settings.LOCALE_PATHS)) # first load all english languages files for defaults for path in paths: try: catalog = gettext_module.translation(domain, path, ['en']) t.update(catalog._catalog) except IOError: pass else: # 'en' is the selected language and at least one of the packages # listed in `packages` has an 'en' catalog if en_selected: en_catalog_missing = False # next load the settings.LANGUAGE_CODE translations if it isn't english if default_locale != 'en': for path in paths: try: catalog = gettext_module.translation(domain, path, [default_locale]) except IOError: catalog = None if catalog is not None: t.update(catalog._catalog) # last load the currently selected language, if it isn't identical to the default. if locale != default_locale: # If the currently selected language is English but it doesn't have a # translation catalog (presumably due to being the language translated # from) then a wrong language catalog might have been loaded in the # previous step. It needs to be discarded. 
if en_selected and en_catalog_missing: t = {} else: locale_t = {} for path in paths: try: catalog = gettext_module.translation(domain, path, [locale]) except IOError: catalog = None if catalog is not None: locale_t.update(catalog._catalog) if locale_t: t = locale_t plural = None if '' in t: for l in t[''].split('\n'): if l.startswith('Plural-Forms:'): plural = l.split(':', 1)[1].strip() if plural is not None: # this should actually be a compiled function of a typical plural-form: # Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : # n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2; plural = [el.strip() for el in plural.split(';') if el.strip().startswith('plural=')][0].split('=', 1)[1] pdict = {} maxcnts = {} catalog = {} for k, v in t.items(): if k == '': continue if isinstance(k, six.string_types): catalog[k] = v elif isinstance(k, tuple): msgid = k[0] cnt = k[1] maxcnts[msgid] = max(cnt, maxcnts.get(msgid, 0)) pdict.setdefault(msgid, {})[cnt] = v else: raise TypeError(k) for k, v in pdict.items(): catalog[k] = [v.get(i, '') for i in range(maxcnts[msgid] + 1)] return catalog, plural def null_javascript_catalog(request, domain=None, packages=None): """ Returns "identity" versions of the JavaScript i18n functions -- i.e., versions that don't actually do anything. """ return render_javascript_catalog() def javascript_catalog(request, domain='djangojs', packages=None): """ Returns the selected language catalog as a javascript library. Receives the list of packages to check for translations in the packages parameter either from an infodict or as a +-delimited string from the request. Default is 'django.conf'. Additionally you can override the gettext domain for this view, but usually you don't want to do that, as JavaScript messages go to the djangojs domain. But this might be needed if you deliver your JavaScript source from Django templates. """ locale = to_locale(get_language()) if request.GET and 'language' in request.GET: if check_for_language(request.GET['language']): locale = to_locale(request.GET['language']) if packages is None: packages = ['django.conf'] if isinstance(packages, six.string_types): packages = packages.split('+') catalog, plural = get_javascript_catalog(locale, domain, packages) return render_javascript_catalog(catalog, plural)
bsd-3-clause
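
A minimal sketch of wiring javascript_catalog into a URLconf, following the view's docstring (Django 1.8-era URL syntax; the URL name is illustrative, and 'django.conf' is the view's documented default package):

# urls.py sketch
from django.conf.urls import url
from django.views.i18n import javascript_catalog

js_info_dict = {'packages': ('django.conf',)}

urlpatterns = [
    url(r'^jsi18n/$', javascript_catalog, js_info_dict,
        name='javascript-catalog'),
]
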
aaronzirbes/ansible
contrib/inventory/jail.py
132
1288
#!/usr/bin/env python

# (c) 2013, Michael Scherer <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

from subprocess import Popen, PIPE
import sys
import json

result = {}
result['all'] = {}

pipe = Popen(['jls', '-q', 'name'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = [x[:-1] for x in pipe.stdout.readlines()]
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'jail'

if len(sys.argv) == 2 and sys.argv[1] == '--list':
    print json.dumps(result)
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
    print json.dumps({'ansible_connection': 'jail'})
else:
    print "Need an argument, either --list or --host <host>"
gpl-3.0
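
Sample invocations of the inventory script above; the output shape mirrors the dicts built in the code (the jail names are made up):

# $ ./jail.py --list
# {"all": {"hosts": ["web1", "db1"], "vars": {"ansible_connection": "jail"}}}
# $ ./jail.py --host web1
# {"ansible_connection": "jail"}
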
dsimic/taxsims
ss.py
1
1112
import pandas as pd
import numpy as np


def ss_calc(
        contrib_yearly, inv_gwth_rt, num_years, safe_withdrw_rate,
        start_age=28):
    """
    inv_gwth_rt is inflation adjusted.
    contrib_yearly is in first year's dollars
    """
    tot_years = max(0, 62 - start_age - num_years) + num_years
    df = pd.DataFrame({
        'contrib_yearly': [contrib_yearly] * num_years +
        [0.] * max(0, (62 - num_years - start_age)),
        'inv_value': [0] * tot_years,
    }, index=range(tot_years))
    for year in range(0, tot_years):
        print year
        multiplier = np.array([
            (1. + inv_gwth_rt) ** (year - y_) for y_ in range(year + 1)])
        print multiplier
        df['inv_value'][year] = np.sum(
            np.array(df['contrib_yearly'][0: year + 1]) * multiplier)
    df['monthly_inv_income'] = safe_withdrw_rate * df['inv_value'] / 12.
    df['monthly_inv_income_w_spouse'] = df['monthly_inv_income'] * 1.5
    return df


if __name__ == "__main__":
    df = ss_calc(15e3, .03, 10, .03)
    ss_benefit_monthly = 939.00
    ss_benefit_w_spouse_monthly = 1.5 * ss_benefit_monthly
gpl-2.0
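
The inner loop above is a geometric-series accumulation; for constant yearly contributions it has a closed form. A sketch of the equivalence (my own restatement, not part of the module):

def future_value(contrib_yearly, r, n):
    # sum_{y=0}^{n-1} contrib * (1+r)**(n-1-y) == contrib * ((1+r)**n - 1) / r
    return contrib_yearly * ((1.0 + r) ** n - 1.0) / r

print future_value(15e3, 0.03, 10)  # matches df['inv_value'][9] from ss_calc above
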
makermade/arm_android-21_arm-linux-androideabi-4.8
lib/python2.7/distutils/msvc9compiler.py
148
31018
"""distutils.msvc9compiler Contains MSVCCompiler, an implementation of the abstract CCompiler class for the Microsoft Visual Studio 2008. The module is compatible with VS 2005 and VS 2008. You can find legacy support for older versions of VS in distutils.msvccompiler. """ # Written by Perry Stoll # hacked by Robin Becker and Thomas Heller to do a better job of # finding DevStudio (through the registry) # ported to VS2005 and VS 2008 by Christian Heimes __revision__ = "$Id$" import os import subprocess import sys import re from distutils.errors import (DistutilsExecError, DistutilsPlatformError, CompileError, LibError, LinkError) from distutils.ccompiler import CCompiler, gen_lib_options from distutils import log from distutils.util import get_platform import _winreg RegOpenKeyEx = _winreg.OpenKeyEx RegEnumKey = _winreg.EnumKey RegEnumValue = _winreg.EnumValue RegError = _winreg.error HKEYS = (_winreg.HKEY_USERS, _winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE, _winreg.HKEY_CLASSES_ROOT) NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) if NATIVE_WIN64: # Visual C++ is a 32-bit application, so we need to look in # the corresponding registry branch, if we're running a # 64-bit Python on Win64 VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" VSEXPRESS_BASE = r"Software\Wow6432Node\Microsoft\VCExpress\%0.1f" WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" else: VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" VSEXPRESS_BASE = r"Software\Microsoft\VCExpress\%0.1f" WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" NET_BASE = r"Software\Microsoft\.NETFramework" # A map keyed by get_platform() return values to values accepted by # 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is # the param to cross-compile on x86 targetting amd64.) PLAT_TO_VCVARS = { 'win32' : 'x86', 'win-amd64' : 'amd64', 'win-ia64' : 'ia64', } class Reg: """Helper class to read values from the registry """ def get_value(cls, path, key): for base in HKEYS: d = cls.read_values(base, path) if d and key in d: return d[key] raise KeyError(key) get_value = classmethod(get_value) def read_keys(cls, base, key): """Return list of registry keys.""" try: handle = RegOpenKeyEx(base, key) except RegError: return None L = [] i = 0 while True: try: k = RegEnumKey(handle, i) except RegError: break L.append(k) i += 1 return L read_keys = classmethod(read_keys) def read_values(cls, base, key): """Return dict of registry keys and values. All names are converted to lowercase. 
""" try: handle = RegOpenKeyEx(base, key) except RegError: return None d = {} i = 0 while True: try: name, value, type = RegEnumValue(handle, i) except RegError: break name = name.lower() d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) i += 1 return d read_values = classmethod(read_values) def convert_mbcs(s): dec = getattr(s, "decode", None) if dec is not None: try: s = dec("mbcs") except UnicodeError: pass return s convert_mbcs = staticmethod(convert_mbcs) class MacroExpander: def __init__(self, version): self.macros = {} self.vsbase = VS_BASE % version self.load_macros(version) def set_macro(self, macro, path, key): self.macros["$(%s)" % macro] = Reg.get_value(path, key) def load_macros(self, version): self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") self.set_macro("FrameworkDir", NET_BASE, "installroot") try: if version >= 8.0: self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0") else: raise KeyError("sdkinstallrootv2.0") except KeyError: raise DistutilsPlatformError( """Python was built with Visual Studio 2008; extensions must be built with a compiler than can generate compatible binaries. Visual Studio 2008 was not found on this system. If you have Cygwin installed, you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") if version >= 9.0: self.set_macro("FrameworkVersion", self.vsbase, "clr version") self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") else: p = r"Software\Microsoft\NET Framework Setup\Product" for base in HKEYS: try: h = RegOpenKeyEx(base, p) except RegError: continue key = RegEnumKey(h, 0) d = Reg.get_value(base, r"%s\%s" % (p, key)) self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): for k, v in self.macros.items(): s = s.replace(k, v) return s def get_build_version(): """Return the version of MSVC that was used to build Python. For Python 2.3 and up, the version number is included in sys.version. For earlier versions, assume the compiler is MSVC 6. """ prefix = "MSC v." i = sys.version.find(prefix) if i == -1: return 6 i = i + len(prefix) s, rest = sys.version[i:].split(" ", 1) majorVersion = int(s[:-2]) - 6 minorVersion = int(s[2:3]) / 10.0 # I don't think paths are affected by minor version in version 6 if majorVersion == 6: minorVersion = 0 if majorVersion >= 6: return majorVersion + minorVersion # else we don't know what version of the compiler this is return None def normalize_and_reduce_paths(paths): """Return a list of normalized paths with duplicates removed. The current order of paths is maintained. """ # Paths are normalized so things like: /a and /a/ aren't both preserved. reduced_paths = [] for p in paths: np = os.path.normpath(p) # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. if np not in reduced_paths: reduced_paths.append(np) return reduced_paths def removeDuplicates(variable): """Remove duplicate values of an environment variable. """ oldList = variable.split(os.pathsep) newList = [] for i in oldList: if i not in newList: newList.append(i) newVariable = os.pathsep.join(newList) return newVariable def find_vcvarsall(version): """Find the vcvarsall.bat file At first it tries to find the productdir of VS 2008 in the registry. If that fails it falls back to the VS90COMNTOOLS env var. 
""" vsbase = VS_BASE % version try: productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir") except KeyError: productdir = None # trying Express edition if productdir is None: vsbase = VSEXPRESS_BASE % version try: productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir") except KeyError: productdir = None log.debug("Unable to find productdir in registry") if not productdir or not os.path.isdir(productdir): toolskey = "VS%0.f0COMNTOOLS" % version toolsdir = os.environ.get(toolskey, None) if toolsdir and os.path.isdir(toolsdir): productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") productdir = os.path.abspath(productdir) if not os.path.isdir(productdir): log.debug("%s is not a valid directory" % productdir) return None else: log.debug("Env var %s is not set or invalid" % toolskey) if not productdir: log.debug("No productdir found") return None vcvarsall = os.path.join(productdir, "vcvarsall.bat") if os.path.isfile(vcvarsall): return vcvarsall log.debug("Unable to find vcvarsall.bat") return None def query_vcvarsall(version, arch="x86"): """Launch vcvarsall.bat and read the settings from its environment """ vcvarsall = find_vcvarsall(version) interesting = set(("include", "lib", "libpath", "path")) result = {} if vcvarsall is None: raise DistutilsPlatformError("Unable to find vcvarsall.bat") log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), stdout=subprocess.PIPE, stderr=subprocess.PIPE) try: stdout, stderr = popen.communicate() if popen.wait() != 0: raise DistutilsPlatformError(stderr.decode("mbcs")) stdout = stdout.decode("mbcs") for line in stdout.split("\n"): line = Reg.convert_mbcs(line) if '=' not in line: continue line = line.strip() key, value = line.split('=', 1) key = key.lower() if key in interesting: if value.endswith(os.pathsep): value = value[:-1] result[key] = removeDuplicates(value) finally: popen.stdout.close() popen.stderr.close() if len(result) != len(interesting): raise ValueError(str(list(result.keys()))) return result # More globals VERSION = get_build_version() if VERSION < 8.0: raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION) # MACROS = MacroExpander(VERSION) class MSVCCompiler(CCompiler) : """Concrete class that implements an interface to Microsoft Visual C++, as defined by the CCompiler abstract class.""" compiler_type = 'msvc' # Just set this so CCompiler's constructor doesn't barf. We currently # don't use the 'set_executables()' bureaucracy provided by CCompiler, # as it really isn't necessary for this sort of single-compiler class. # Would be nice to have a consistent interface with UnixCCompiler, # though, so it's worth thinking about. executables = {} # Private class data (need to distinguish C from C++ source for compiler) _c_extensions = ['.c'] _cpp_extensions = ['.cc', '.cpp', '.cxx'] _rc_extensions = ['.rc'] _mc_extensions = ['.mc'] # Needed for the filename generation methods provided by the # base class, CCompiler. 
src_extensions = (_c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions) res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' shared_lib_extension = '.dll' static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' def __init__(self, verbose=0, dry_run=0, force=0): CCompiler.__init__ (self, verbose, dry_run, force) self.__version = VERSION self.__root = r"Software\Microsoft\VisualStudio" # self.__macros = MACROS self.__paths = [] # target platform (.plat_name is consistent with 'bdist') self.plat_name = None self.__arch = None # deprecated name self.initialized = False def initialize(self, plat_name=None): # multi-init means we would need to check platform same each time... assert not self.initialized, "don't init multiple times" if plat_name is None: plat_name = get_platform() # sanity check for platforms to prevent obscure errors later. ok_plats = 'win32', 'win-amd64', 'win-ia64' if plat_name not in ok_plats: raise DistutilsPlatformError("--plat-name must be one of %s" % (ok_plats,)) if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): # Assume that the SDK set up everything alright; don't try to be # smarter self.cc = "cl.exe" self.linker = "link.exe" self.lib = "lib.exe" self.rc = "rc.exe" self.mc = "mc.exe" else: # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; # to cross compile, you use 'x86_amd64'. # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross # compile use 'x86' (ie, it runs the x86 compiler directly) # No idea how itanium handles this, if at all. if plat_name == get_platform() or plat_name == 'win32': # native build or cross-compile to win32 plat_spec = PLAT_TO_VCVARS[plat_name] else: # cross compile from win32 -> some 64bit plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ PLAT_TO_VCVARS[plat_name] vc_env = query_vcvarsall(VERSION, plat_spec) # take care to only use strings in the environment. self.__paths = vc_env['path'].encode('mbcs').split(os.pathsep) os.environ['lib'] = vc_env['lib'].encode('mbcs') os.environ['include'] = vc_env['include'].encode('mbcs') if len(self.__paths) == 0: raise DistutilsPlatformError("Python was built with %s, " "and extensions need to be built with the same " "version of the compiler, but it isn't installed." 
% self.__product) self.cc = self.find_exe("cl.exe") self.linker = self.find_exe("link.exe") self.lib = self.find_exe("lib.exe") self.rc = self.find_exe("rc.exe") # resource compiler self.mc = self.find_exe("mc.exe") # message compiler #self.set_path_env_var('lib') #self.set_path_env_var('include') # extend the MSVC path with the current path try: for p in os.environ['path'].split(';'): self.__paths.append(p) except KeyError: pass self.__paths = normalize_and_reduce_paths(self.__paths) os.environ['path'] = ";".join(self.__paths) self.preprocess_options = None if self.__arch == "x86": self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/DNDEBUG'] self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/Z7', '/D_DEBUG'] else: # Win64 self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , '/DNDEBUG'] self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', '/Z7', '/D_DEBUG'] self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] if self.__version >= 7: self.ldflags_shared_debug = [ '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None' ] self.ldflags_static = [ '/nologo'] self.initialized = True # -- Worker methods ------------------------------------------------ def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): # Copied from ccompiler.py, extended to return .res as 'object'-file # for .rc input file if output_dir is None: output_dir = '' obj_names = [] for src_name in source_filenames: (base, ext) = os.path.splitext (src_name) base = os.path.splitdrive(base)[1] # Chop off the drive base = base[os.path.isabs(base):] # If abs, chop off leading / if ext not in self.src_extensions: # Better to raise an exception instead of silently continuing # and later complain about sources and targets having # different lengths raise CompileError ("Don't know how to compile %s" % src_name) if strip_dir: base = os.path.basename (base) if ext in self._rc_extensions: obj_names.append (os.path.join (output_dir, base + self.res_extension)) elif ext in self._mc_extensions: obj_names.append (os.path.join (output_dir, base + self.res_extension)) else: obj_names.append (os.path.join (output_dir, base + self.obj_extension)) return obj_names def compile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None): if not self.initialized: self.initialize() compile_info = self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs) macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] compile_opts.append ('/c') if debug: compile_opts.extend(self.compile_options_debug) else: compile_opts.extend(self.compile_options) for obj in objects: try: src, ext = build[obj] except KeyError: continue if debug: # pass the full pathname to MSVC in debug mode, # this allows the debugger to find the source file # without asking the user to browse for it src = os.path.abspath(src) if ext in self._c_extensions: input_opt = "/Tc" + src elif ext in self._cpp_extensions: input_opt = "/Tp" + src elif ext in self._rc_extensions: # compile .RC to .RES file input_opt = src output_opt = "/fo" + obj try: self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) except DistutilsExecError, msg: raise CompileError(msg) continue elif ext in self._mc_extensions: # Compile .MC to .RC file to .RES file. 
# * '-h dir' specifies the directory for the # generated include file # * '-r dir' specifies the target directory of the # generated RC file and the binary message resource # it includes # # For now (since there are no options to change this), # we use the source-directory for the include file and # the build directory for the RC file and message # resources. This works at least for win32all. h_dir = os.path.dirname(src) rc_dir = os.path.dirname(obj) try: # first compile .MC to .RC and .H file self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) base, _ = os.path.splitext (os.path.basename (src)) rc_file = os.path.join (rc_dir, base + '.rc') # then compile .RC to .RES file self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) except DistutilsExecError, msg: raise CompileError(msg) continue else: # how to handle this file? raise CompileError("Don't know how to compile %s to %s" % (src, obj)) output_opt = "/Fo" + obj try: self.spawn([self.cc] + compile_opts + pp_opts + [input_opt, output_opt] + extra_postargs) except DistutilsExecError, msg: raise CompileError(msg) return objects def create_static_lib(self, objects, output_libname, output_dir=None, debug=0, target_lang=None): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: pass # XXX what goes here? try: self.spawn([self.lib] + lib_args) except DistutilsExecError, msg: raise LibError(msg) else: log.debug("skipping %s (up-to-date)", output_filename) def link(self, target_desc, objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) (libraries, library_dirs, runtime_library_dirs) = fixed_args if runtime_library_dirs: self.warn ("I don't know what to do with 'runtime_library_dirs': " + str (runtime_library_dirs)) lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) if self._need_link(objects, output_filename): if target_desc == CCompiler.EXECUTABLE: if debug: ldflags = self.ldflags_shared_debug[1:] else: ldflags = self.ldflags_shared[1:] else: if debug: ldflags = self.ldflags_shared_debug else: ldflags = self.ldflags_shared export_opts = [] for sym in (export_symbols or []): export_opts.append("/EXPORT:" + sym) ld_args = (ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]) # The MSVC linker generates .lib and .exp files, which cannot be # suppressed by any linker switches. The .lib files may even be # needed! Make sure they are generated in the temporary build # directory. Since they have different names for debug and release # builds, they can go into the same directory. 
build_temp = os.path.dirname(objects[0]) if export_symbols is not None: (dll_name, dll_ext) = os.path.splitext( os.path.basename(output_filename)) implib_file = os.path.join( build_temp, self.library_filename(dll_name)) ld_args.append ('/IMPLIB:' + implib_file) self.manifest_setup_ldargs(output_filename, build_temp, ld_args) if extra_preargs: ld_args[:0] = extra_preargs if extra_postargs: ld_args.extend(extra_postargs) self.mkpath(os.path.dirname(output_filename)) try: self.spawn([self.linker] + ld_args) except DistutilsExecError, msg: raise LinkError(msg) # embed the manifest # XXX - this is somewhat fragile - if mt.exe fails, distutils # will still consider the DLL up-to-date, but it will not have a # manifest. Maybe we should link to a temp file? OTOH, that # implies a build environment error that shouldn't go undetected. mfinfo = self.manifest_get_embed_info(target_desc, ld_args) if mfinfo is not None: mffilename, mfid = mfinfo out_arg = '-outputresource:%s;%s' % (output_filename, mfid) try: self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg]) except DistutilsExecError, msg: raise LinkError(msg) else: log.debug("skipping %s (up-to-date)", output_filename) def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): # If we need a manifest at all, an embedded manifest is recommended. # See MSDN article titled # "How to: Embed a Manifest Inside a C/C++ Application" # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) # Ask the linker to generate the manifest in the temp dir, so # we can check it, and possibly embed it, later. temp_manifest = os.path.join( build_temp, os.path.basename(output_filename) + ".manifest") ld_args.append('/MANIFESTFILE:' + temp_manifest) def manifest_get_embed_info(self, target_desc, ld_args): # If a manifest should be embedded, return a tuple of # (manifest_filename, resource_id). Returns None if no manifest # should be embedded. See http://bugs.python.org/issue7833 for why # we want to avoid any manifest for extension modules if we can) for arg in ld_args: if arg.startswith("/MANIFESTFILE:"): temp_manifest = arg.split(":", 1)[1] break else: # no /MANIFESTFILE so nothing to do. return None if target_desc == CCompiler.EXECUTABLE: # by default, executables always get the manifest with the # CRT referenced. mfid = 1 else: # Extension modules try and avoid any manifest if possible. mfid = 2 temp_manifest = self._remove_visual_c_ref(temp_manifest) if temp_manifest is None: return None return temp_manifest, mfid def _remove_visual_c_ref(self, manifest_file): try: # Remove references to the Visual C runtime, so they will # fall through to the Visual C dependency of Python.exe. # This way, when installed for a restricted user (e.g. # runtimes are not in WinSxS folder, but in Python's own # folder), the runtimes do not need to be in every folder # with .pyd's. # Returns either the filename of the modified manifest or # None if no manifest should be embedded. manifest_f = open(manifest_file) try: manifest_buf = manifest_f.read() finally: manifest_f.close() pattern = re.compile( r"""<assemblyIdentity.*?name=("|')Microsoft\."""\ r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""", re.DOTALL) manifest_buf = re.sub(pattern, "", manifest_buf) pattern = "<dependentAssembly>\s*</dependentAssembly>" manifest_buf = re.sub(pattern, "", manifest_buf) # Now see if any other assemblies are referenced - if not, we # don't want a manifest embedded. 
pattern = re.compile( r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')""" r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL) if re.search(pattern, manifest_buf) is None: return None manifest_f = open(manifest_file, 'w') try: manifest_f.write(manifest_buf) return manifest_file finally: manifest_f.close() except IOError: pass # -- Miscellaneous methods ----------------------------------------- # These are all used by the 'gen_lib_options() function, in # ccompiler.py. def library_dir_option(self, dir): return "/LIBPATH:" + dir def runtime_library_dir_option(self, dir): raise DistutilsPlatformError( "don't know how to set runtime library search path for MSVC++") def library_option(self, lib): return self.library_filename(lib) def find_library_file(self, dirs, lib, debug=0): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. if debug: try_names = [lib + "_d", lib] else: try_names = [lib] for dir in dirs: for name in try_names: libfile = os.path.join(dir, self.library_filename (name)) if os.path.exists(libfile): return libfile else: # Oops, didn't find it in *any* of 'dirs' return None # Helper methods for using the MSVC registry settings def find_exe(self, exe): """Return path to an MSVC executable program. Tries to find the program in several places: first, one of the MSVC program search paths from the registry; next, the directories in the PATH environment variable. If any of those work, return an absolute path that is known to exist. If none of them work, just return the original program name, 'exe'. """ for p in self.__paths: fn = os.path.join(os.path.abspath(p), exe) if os.path.isfile(fn): return fn # didn't find it; try existing path for p in os.environ['Path'].split(';'): fn = os.path.join(os.path.abspath(p),exe) if os.path.isfile(fn): return fn return exe
gpl-2.0
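
query_vcvarsall is the piece of this module most often reused on its own. A sketch (Windows-only; requires a Visual Studio 9.0 install, otherwise it raises DistutilsPlatformError):

from distutils.msvc9compiler import query_vcvarsall

env = query_vcvarsall(9.0, "x86")
print sorted(env)           # ['include', 'lib', 'libpath', 'path']
print env["include"][:80]   # start of the INCLUDE list from vcvarsall.bat
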
pacoqueen/bbinn
PyChart-1.39/demos/linestyles.py
1
1258
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.append("..")
#
# Copyright (C) 2000-2005 by Yasushi Saito ([email protected])
#
# Pychart is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any
# later version.
#
# Pychart is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
from pychart import *
import pychart.doc_support
import chartdemo
import re

can = canvas.default_canvas()

x = 100
y = 500


def drawLine(style):
    global x, y
    name = pychart.doc_support.stringify_value(style)
    name = re.sub("line_style\\.", "", name)
    name = pychart.doc_support.break_string(name)
    can.line(style, x, y, x + 40, y)
    # print "name=", name
    height = font.text_height(name)[0] + 5
    tb = text_box.T(text=name, loc=(x, y - height), line_style=None)
    x = x + 60
    tb.draw()


for style in line_style.standards.list():
    drawLine(style)
    if x >= chartdemo.MaxWidth:
        x = 100
        y = y - 40
gpl-2.0
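
Stripped of the demo-harness dependencies, the reusable idea above is enumerating line_style.standards. A sketch using only calls that appear in the demo itself (assumption: PyChart emits the drawing at interpreter exit, as the demo relies on):

from pychart import line_style, canvas

can = canvas.default_canvas()
for i, style in enumerate(line_style.standards.list()):
    # One ruled sample per registered standard line style.
    can.line(style, 100, 500 - 10 * i, 140, 500 - 10 * i)
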
leiferikb/bitpop
src/third_party/WebKit/Tools/Scripts/webkitpy/common/checkout/scm/scm_mock.py
2
4048
# Copyright (C) 2011 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from webkitpy.common.system.filesystem_mock import MockFileSystem from webkitpy.common.system.executive_mock import MockExecutive class MockSCM(object): executable_name = "MockSCM" def __init__(self, filesystem=None, executive=None): self.checkout_root = "/mock-checkout/third_party/WebKit" self.added_paths = set() self._filesystem = filesystem or MockFileSystem() self._executive = executive or MockExecutive() def add(self, destination_path, return_exit_code=False): self.add_list([destination_path], return_exit_code) def add_list(self, destination_paths, return_exit_code=False): self.added_paths.update(set(destination_paths)) if return_exit_code: return 0 def has_working_directory_changes(self): return False def ensure_cleanly_tracking_remote_master(self): pass def current_branch(self): return "mock-branch-name" def checkout_branch(self, name): pass def create_clean_branch(self, name): pass def delete_branch(self, name): pass def supports_local_commits(self): return True def exists(self, path): # TestRealMain.test_real_main (and several other rebaseline tests) are sensitive to this return value. # We should make those tests more robust, but for now we just return True always (since no test needs otherwise). 
return True def absolute_path(self, *comps): return self._filesystem.join(self.checkout_root, *comps) def svn_revision(self, path): return '5678' def svn_revision_from_git_commit(self, git_commit): if git_commit == '6469e754a1': return 1234 if git_commit == '624c3081c0': return 5678 if git_commit == '624caaaaaa': return 10000 return None def timestamp_of_revision(self, path, revision): return '2013-02-01 08:48:05 +0000' def commit_locally_with_message(self, message, commit_all_working_directory_changes=True): pass def delete(self, path): return self.delete_list([path]) def delete_list(self, paths): if not self._filesystem: return for path in paths: if self._filesystem.exists(path): self._filesystem.remove(path) def move(self, origin, destination): if self._filesystem: self._filesystem.move(self.absolute_path(origin), self.absolute_path(destination))
gpl-3.0
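
How a test would typically consume MockSCM; the test body is illustrative, but every behaviour asserted below comes straight from the class above (added_paths tracking, the canned git-to-SVN revision map, and the constant timestamp):

from webkitpy.common.checkout.scm.scm_mock import MockSCM

scm = MockSCM()
scm.add("LayoutTests/foo.html")
assert "LayoutTests/foo.html" in scm.added_paths
assert scm.svn_revision_from_git_commit("6469e754a1") == 1234
assert scm.timestamp_of_revision("any/path", "5678") == "2013-02-01 08:48:05 +0000"
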
kookie424/googletest
test/gtest_test_utils.py
674
10826
#!/usr/bin/env python # # Copyright 2006, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Unit test utilities for Google C++ Testing Framework.""" __author__ = '[email protected] (Zhanyong Wan)' import atexit import os import shutil import sys import tempfile import unittest _test_module = unittest # Suppresses the 'Import not at the top of the file' lint complaint. # pylint: disable-msg=C6204 try: import subprocess _SUBPROCESS_MODULE_AVAILABLE = True except: import popen2 _SUBPROCESS_MODULE_AVAILABLE = False # pylint: enable-msg=C6204 GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT' IS_WINDOWS = os.name == 'nt' IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0] # The environment variable for specifying the path to the premature-exit file. PREMATURE_EXIT_FILE_ENV_VAR = 'TEST_PREMATURE_EXIT_FILE' environ = os.environ.copy() def SetEnvVar(env_var, value): """Sets/unsets an environment variable to a given value.""" if value is not None: environ[env_var] = value elif env_var in environ: del environ[env_var] # Here we expose a class from a particular module, depending on the # environment. The comment suppresses the 'Invalid variable name' lint # complaint. TestCase = _test_module.TestCase # pylint: disable-msg=C6409 # Initially maps a flag to its default value. After # _ParseAndStripGTestFlags() is called, maps a flag to its actual value. _flag_map = {'source_dir': os.path.dirname(sys.argv[0]), 'build_dir': os.path.dirname(sys.argv[0])} _gtest_flags_are_parsed = False def _ParseAndStripGTestFlags(argv): """Parses and strips Google Test flags from argv. This is idempotent.""" # Suppresses the lint complaint about a global variable since we need it # here to maintain module-wide state. global _gtest_flags_are_parsed # pylint: disable-msg=W0603 if _gtest_flags_are_parsed: return _gtest_flags_are_parsed = True for flag in _flag_map: # The environment variable overrides the default value. if flag.upper() in os.environ: _flag_map[flag] = os.environ[flag.upper()] # The command line flag overrides the environment variable. i = 1 # Skips the program name. 
while i < len(argv): prefix = '--' + flag + '=' if argv[i].startswith(prefix): _flag_map[flag] = argv[i][len(prefix):] del argv[i] break else: # We don't increment i in case we just found a --gtest_* flag # and removed it from argv. i += 1 def GetFlag(flag): """Returns the value of the given flag.""" # In case GetFlag() is called before Main(), we always call # _ParseAndStripGTestFlags() here to make sure the --gtest_* flags # are parsed. _ParseAndStripGTestFlags(sys.argv) return _flag_map[flag] def GetSourceDir(): """Returns the absolute path of the directory where the .py files are.""" return os.path.abspath(GetFlag('source_dir')) def GetBuildDir(): """Returns the absolute path of the directory where the test binaries are.""" return os.path.abspath(GetFlag('build_dir')) _temp_dir = None def _RemoveTempDir(): if _temp_dir: shutil.rmtree(_temp_dir, ignore_errors=True) atexit.register(_RemoveTempDir) def GetTempDir(): """Returns a directory for temporary files.""" global _temp_dir if not _temp_dir: _temp_dir = tempfile.mkdtemp() return _temp_dir def GetTestExecutablePath(executable_name, build_dir=None): """Returns the absolute path of the test binary given its name. The function will print a message and abort the program if the resulting file doesn't exist. Args: executable_name: name of the test binary that the test script runs. build_dir: directory where to look for executables, by default the result of GetBuildDir(). Returns: The absolute path of the test binary. """ path = os.path.abspath(os.path.join(build_dir or GetBuildDir(), executable_name)) if (IS_WINDOWS or IS_CYGWIN) and not path.endswith('.exe'): path += '.exe' if not os.path.exists(path): message = ( 'Unable to find the test binary "%s". Please make sure to provide\n' 'a path to the binary via the --build_dir flag or the BUILD_DIR\n' 'environment variable.' % path) print >> sys.stderr, message sys.exit(1) return path def GetExitStatus(exit_code): """Returns the argument to exit(), or -1 if exit() wasn't called. Args: exit_code: the result value of os.system(command). """ if os.name == 'nt': # On Windows, os.WEXITSTATUS() doesn't work and os.system() returns # the argument to exit() directly. return exit_code else: # On Unix, os.WEXITSTATUS() must be used to extract the exit status # from the result of os.system(). if os.WIFEXITED(exit_code): return os.WEXITSTATUS(exit_code) else: return -1 class Subprocess: def __init__(self, command, working_dir=None, capture_stderr=True, env=None): """Changes into a specified directory, if provided, and executes a command. Restores the old directory afterwards. Args: command: The command to run, in the form of sys.argv. working_dir: The directory to change into. capture_stderr: Determines whether to capture stderr in the output member or to discard it. env: Dictionary with environment to pass to the subprocess. Returns: An object that represents outcome of the executed process. It has the following attributes: terminated_by_signal True iff the child process has been terminated by a signal. signal Sygnal that terminated the child process. exited True iff the child process exited normally. exit_code The code with which the child process exited. output Child process's stdout and stderr output combined in a string. """ # The subprocess module is the preferrable way of running programs # since it is available and behaves consistently on all platforms, # including Windows. But it is only available starting in python 2.4. 
# In earlier python versions, we revert to the popen2 module, which is # available in python 2.0 and later but doesn't provide required # functionality (Popen4) under Windows. This allows us to support Mac # OS X 10.4 Tiger, which has python 2.3 installed. if _SUBPROCESS_MODULE_AVAILABLE: if capture_stderr: stderr = subprocess.STDOUT else: stderr = subprocess.PIPE p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=stderr, cwd=working_dir, universal_newlines=True, env=env) # communicate returns a tuple with the file obect for the child's # output. self.output = p.communicate()[0] self._return_code = p.returncode else: old_dir = os.getcwd() def _ReplaceEnvDict(dest, src): # Changes made by os.environ.clear are not inheritable by child # processes until Python 2.6. To produce inheritable changes we have # to delete environment items with the del statement. for key in dest.keys(): del dest[key] dest.update(src) # When 'env' is not None, backup the environment variables and replace # them with the passed 'env'. When 'env' is None, we simply use the # current 'os.environ' for compatibility with the subprocess.Popen # semantics used above. if env is not None: old_environ = os.environ.copy() _ReplaceEnvDict(os.environ, env) try: if working_dir is not None: os.chdir(working_dir) if capture_stderr: p = popen2.Popen4(command) else: p = popen2.Popen3(command) p.tochild.close() self.output = p.fromchild.read() ret_code = p.wait() finally: os.chdir(old_dir) # Restore the old environment variables # if they were replaced. if env is not None: _ReplaceEnvDict(os.environ, old_environ) # Converts ret_code to match the semantics of # subprocess.Popen.returncode. if os.WIFSIGNALED(ret_code): self._return_code = -os.WTERMSIG(ret_code) else: # os.WIFEXITED(ret_code) should return True here. self._return_code = os.WEXITSTATUS(ret_code) if self._return_code < 0: self.terminated_by_signal = True self.exited = False self.signal = -self._return_code else: self.terminated_by_signal = False self.exited = True self.exit_code = self._return_code def Main(): """Runs the unit test.""" # We must call _ParseAndStripGTestFlags() before calling # unittest.main(). Otherwise the latter will be confused by the # --gtest_* flags. _ParseAndStripGTestFlags(sys.argv) # The tested binaries should not be writing XML output files unless the # script explicitly instructs them to. # TODO([email protected]): Move this into Subprocess when we implement # passing environment into it as a parameter. if GTEST_OUTPUT_VAR_NAME in os.environ: del os.environ[GTEST_OUTPUT_VAR_NAME] _test_module.main()
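GetExitStatus above hides a platform difference worth seeing concretely: on Windows os.system() returns the exit() argument directly, while on Unix it returns a wait status that must be unpacked. A minimal self-check of the same logic (the child command is an arbitrary example):

import os
import sys

# Spawn a child that exits with code 7, then recover that code the way
# GetExitStatus does.
status = os.system('%s -c "import sys; sys.exit(7)"' % sys.executable)
if os.name == 'nt':
    code = status                      # Windows: the exit code itself
elif os.WIFEXITED(status):
    code = os.WEXITSTATUS(status)      # Unix: unpack the wait status
else:
    code = -1                          # terminated by a signal
print(code)  # -> 7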
bsd-3-clause
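The flag resolution implemented by _ParseAndStripGTestFlags follows the order default < environment variable < command line, stripping any --flag= argument it consumes. A standalone sketch of that order (the script name and path here are hypothetical):

import os

def resolve_flag(flag, argv, default):
    # Built-in default, then FLAG-named environment variable, then --flag=.
    value = os.environ.get(flag.upper(), default)  # env var overrides default
    i = 1  # skip the program name
    while i < len(argv):
        prefix = '--' + flag + '='
        if argv[i].startswith(prefix):
            value = argv[i][len(prefix):]  # command line overrides env var
            del argv[i]                    # strip the flag, as the real parser does
            break
        i += 1
    return value

argv = ['gtest_output_test.py', '--build_dir=/tmp/out']
print(resolve_flag('build_dir', argv, os.path.dirname(argv[0])))  # -> /tmp/out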
40223247/test2
static/Brython3.1.1-20150328-091302/Lib/fractions.py
722
23203
# Originally contributed by Sjoerd Mullender. # Significantly modified by Jeffrey Yasskin <jyasskin at gmail.com>. """Fraction, infinite-precision, real numbers.""" from decimal import Decimal import math import numbers import operator import re import sys __all__ = ['Fraction', 'gcd'] def gcd(a, b): """Calculate the Greatest Common Divisor of a and b. Unless b==0, the result will have the same sign as b (so that when b is divided by it, the result comes out positive). """ while b: a, b = b, a%b return a # Constants related to the hash implementation; hash(x) is based # on the reduction of x modulo the prime _PyHASH_MODULUS. _PyHASH_MODULUS = sys.hash_info.modulus # Value to be used for rationals that reduce to infinity modulo # _PyHASH_MODULUS. _PyHASH_INF = sys.hash_info.inf _RATIONAL_FORMAT = re.compile(r""" \A\s* # optional whitespace at the start, then (?P<sign>[-+]?) # an optional sign, then (?=\d|\.\d) # lookahead for digit or .digit (?P<num>\d*) # numerator (possibly empty) (?: # followed by (?:/(?P<denom>\d+))? # an optional denominator | # or (?:\.(?P<decimal>\d*))? # an optional fractional part (?:E(?P<exp>[-+]?\d+))? # and optional exponent ) \s*\Z # and optional whitespace to finish """, re.VERBOSE | re.IGNORECASE) class Fraction(numbers.Rational): """This class implements rational numbers. In the two-argument form of the constructor, Fraction(8, 6) will produce a rational number equivalent to 4/3. Both arguments must be Rational. The numerator defaults to 0 and the denominator defaults to 1 so that Fraction(3) == 3 and Fraction() == 0. Fractions can also be constructed from: - numeric strings similar to those accepted by the float constructor (for example, '-2.3' or '1e10') - strings of the form '123/456' - float and Decimal instances - other Rational instances (including integers) """ __slots__ = ('_numerator', '_denominator') # We're immutable, so use __new__ not __init__ def __new__(cls, numerator=0, denominator=None): """Constructs a Rational. Takes a string like '3/2' or '1.5', another Rational instance, a numerator/denominator pair, or a float. Examples -------- >>> Fraction(10, -8) Fraction(-5, 4) >>> Fraction(Fraction(1, 7), 5) Fraction(1, 35) >>> Fraction(Fraction(1, 7), Fraction(2, 3)) Fraction(3, 14) >>> Fraction('314') Fraction(314, 1) >>> Fraction('-35/4') Fraction(-35, 4) >>> Fraction('3.1415') # conversion from numeric string Fraction(6283, 2000) >>> Fraction('-47e-2') # string may include a decimal exponent Fraction(-47, 100) >>> Fraction(1.47) # direct construction from float (exact conversion) Fraction(6620291452234629, 4503599627370496) >>> Fraction(2.25) Fraction(9, 4) >>> Fraction(Decimal('1.47')) Fraction(147, 100) """ self = super(Fraction, cls).__new__(cls) if denominator is None: if isinstance(numerator, numbers.Rational): self._numerator = numerator.numerator self._denominator = numerator.denominator return self elif isinstance(numerator, float): # Exact conversion from float value = Fraction.from_float(numerator) self._numerator = value._numerator self._denominator = value._denominator return self elif isinstance(numerator, Decimal): value = Fraction.from_decimal(numerator) self._numerator = value._numerator self._denominator = value._denominator return self elif isinstance(numerator, str): # Handle construction from strings. 
m = _RATIONAL_FORMAT.match(numerator) if m is None: raise ValueError('Invalid literal for Fraction: %r' % numerator) numerator = int(m.group('num') or '0') denom = m.group('denom') if denom: denominator = int(denom) else: denominator = 1 decimal = m.group('decimal') if decimal: scale = 10**len(decimal) numerator = numerator * scale + int(decimal) denominator *= scale exp = m.group('exp') if exp: exp = int(exp) if exp >= 0: numerator *= 10**exp else: denominator *= 10**-exp if m.group('sign') == '-': numerator = -numerator else: raise TypeError("argument should be a string " "or a Rational instance") elif (isinstance(numerator, numbers.Rational) and isinstance(denominator, numbers.Rational)): numerator, denominator = ( numerator.numerator * denominator.denominator, denominator.numerator * numerator.denominator ) else: raise TypeError("both arguments should be " "Rational instances") if denominator == 0: raise ZeroDivisionError('Fraction(%s, 0)' % numerator) g = gcd(numerator, denominator) self._numerator = numerator // g self._denominator = denominator // g return self @classmethod def from_float(cls, f): """Converts a finite float to a rational number, exactly. Beware that Fraction.from_float(0.3) != Fraction(3, 10). """ if isinstance(f, numbers.Integral): return cls(f) elif not isinstance(f, float): raise TypeError("%s.from_float() only takes floats, not %r (%s)" % (cls.__name__, f, type(f).__name__)) if math.isnan(f): raise ValueError("Cannot convert %r to %s." % (f, cls.__name__)) if math.isinf(f): raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__)) return cls(*f.as_integer_ratio()) @classmethod def from_decimal(cls, dec): """Converts a finite Decimal instance to a rational number, exactly.""" from decimal import Decimal if isinstance(dec, numbers.Integral): dec = Decimal(int(dec)) elif not isinstance(dec, Decimal): raise TypeError( "%s.from_decimal() only takes Decimals, not %r (%s)" % (cls.__name__, dec, type(dec).__name__)) if dec.is_infinite(): raise OverflowError( "Cannot convert %s to %s." % (dec, cls.__name__)) if dec.is_nan(): raise ValueError("Cannot convert %s to %s." % (dec, cls.__name__)) sign, digits, exp = dec.as_tuple() digits = int(''.join(map(str, digits))) if sign: digits = -digits if exp >= 0: return cls(digits * 10 ** exp) else: return cls(digits, 10 ** -exp) def limit_denominator(self, max_denominator=1000000): """Closest Fraction to self with denominator at most max_denominator. >>> Fraction('3.141592653589793').limit_denominator(10) Fraction(22, 7) >>> Fraction('3.141592653589793').limit_denominator(100) Fraction(311, 99) >>> Fraction(4321, 8765).limit_denominator(10000) Fraction(4321, 8765) """ # Algorithm notes: For any real number x, define a *best upper # approximation* to x to be a rational number p/q such that: # # (1) p/q >= x, and # (2) if p/q > r/s >= x then s > q, for any rational r/s. # # Define *best lower approximation* similarly. Then it can be # proved that a rational number is a best upper or lower # approximation to x if, and only if, it is a convergent or # semiconvergent of the (unique shortest) continued fraction # associated to x. # # To find a best rational approximation with denominator <= M, # we find the best upper and lower approximations with # denominator <= M and take whichever of these is closer to x. # In the event of a tie, the bound with smaller denominator is # chosen. 
If both denominators are equal (which can happen # only when max_denominator == 1 and self is midway between # two integers) the lower bound---i.e., the floor of self, is # taken. if max_denominator < 1: raise ValueError("max_denominator should be at least 1") if self._denominator <= max_denominator: return Fraction(self) p0, q0, p1, q1 = 0, 1, 1, 0 n, d = self._numerator, self._denominator while True: a = n//d q2 = q0+a*q1 if q2 > max_denominator: break p0, q0, p1, q1 = p1, q1, p0+a*p1, q2 n, d = d, n-a*d k = (max_denominator-q0)//q1 bound1 = Fraction(p0+k*p1, q0+k*q1) bound2 = Fraction(p1, q1) if abs(bound2 - self) <= abs(bound1-self): return bound2 else: return bound1 @property def numerator(a): return a._numerator @property def denominator(a): return a._denominator def __repr__(self): """repr(self)""" return ('Fraction(%s, %s)' % (self._numerator, self._denominator)) def __str__(self): """str(self)""" if self._denominator == 1: return str(self._numerator) else: return '%s/%s' % (self._numerator, self._denominator) def _operator_fallbacks(monomorphic_operator, fallback_operator): """Generates forward and reverse operators given a purely-rational operator and a function from the operator module. Use this like: __op__, __rop__ = _operator_fallbacks(just_rational_op, operator.op) In general, we want to implement the arithmetic operations so that mixed-mode operations either call an implementation whose author knew about the types of both arguments, or convert both to the nearest built in type and do the operation there. In Fraction, that means that we define __add__ and __radd__ as: def __add__(self, other): # Both types have numerators/denominator attributes, # so do the operation directly if isinstance(other, (int, Fraction)): return Fraction(self.numerator * other.denominator + other.numerator * self.denominator, self.denominator * other.denominator) # float and complex don't have those operations, but we # know about those types, so special case them. elif isinstance(other, float): return float(self) + other elif isinstance(other, complex): return complex(self) + other # Let the other type take over. return NotImplemented def __radd__(self, other): # radd handles more types than add because there's # nothing left to fall back to. if isinstance(other, numbers.Rational): return Fraction(self.numerator * other.denominator + other.numerator * self.denominator, self.denominator * other.denominator) elif isinstance(other, Real): return float(other) + float(self) elif isinstance(other, Complex): return complex(other) + complex(self) return NotImplemented There are 5 different cases for a mixed-type addition on Fraction. I'll refer to all of the above code that doesn't refer to Fraction, float, or complex as "boilerplate". 'r' will be an instance of Fraction, which is a subtype of Rational (r : Fraction <: Rational), and b : B <: Complex. The first three involve 'r + b': 1. If B <: Fraction, int, float, or complex, we handle that specially, and all is well. 2. If Fraction falls back to the boilerplate code, and it were to return a value from __add__, we'd miss the possibility that B defines a more intelligent __radd__, so the boilerplate should return NotImplemented from __add__. In particular, we don't handle Rational here, even though we could get an exact answer, in case the other type wants to do something special. 3. If B <: Fraction, Python tries B.__radd__ before Fraction.__add__. 
This is ok, because it was implemented with knowledge of Fraction, so it can handle those instances before delegating to Real or Complex. The next two situations describe 'b + r'. We assume that b didn't know about Fraction in its implementation, and that it uses similar boilerplate code: 4. If B <: Rational, then __radd_ converts both to the builtin rational type (hey look, that's us) and proceeds. 5. Otherwise, __radd__ tries to find the nearest common base ABC, and fall back to its builtin type. Since this class doesn't subclass a concrete type, there's no implementation to fall back to, so we need to try as hard as possible to return an actual value, or the user will get a TypeError. """ def forward(a, b): if isinstance(b, (int, Fraction)): return monomorphic_operator(a, b) elif isinstance(b, float): return fallback_operator(float(a), b) elif isinstance(b, complex): return fallback_operator(complex(a), b) else: return NotImplemented forward.__name__ = '__' + fallback_operator.__name__ + '__' forward.__doc__ = monomorphic_operator.__doc__ def reverse(b, a): if isinstance(a, numbers.Rational): # Includes ints. return monomorphic_operator(a, b) elif isinstance(a, numbers.Real): return fallback_operator(float(a), float(b)) elif isinstance(a, numbers.Complex): return fallback_operator(complex(a), complex(b)) else: return NotImplemented reverse.__name__ = '__r' + fallback_operator.__name__ + '__' reverse.__doc__ = monomorphic_operator.__doc__ return forward, reverse def _add(a, b): """a + b""" return Fraction(a.numerator * b.denominator + b.numerator * a.denominator, a.denominator * b.denominator) __add__, __radd__ = _operator_fallbacks(_add, operator.add) def _sub(a, b): """a - b""" return Fraction(a.numerator * b.denominator - b.numerator * a.denominator, a.denominator * b.denominator) __sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub) def _mul(a, b): """a * b""" return Fraction(a.numerator * b.numerator, a.denominator * b.denominator) __mul__, __rmul__ = _operator_fallbacks(_mul, operator.mul) def _div(a, b): """a / b""" return Fraction(a.numerator * b.denominator, a.denominator * b.numerator) __truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv) def __floordiv__(a, b): """a // b""" return math.floor(a / b) def __rfloordiv__(b, a): """a // b""" return math.floor(a / b) def __mod__(a, b): """a % b""" div = a // b return a - b * div def __rmod__(b, a): """a % b""" div = a // b return a - b * div def __pow__(a, b): """a ** b If b is not an integer, the result will be a float or complex since roots are generally irrational. If b is an integer, the result will be rational. """ if isinstance(b, numbers.Rational): if b.denominator == 1: power = b.numerator if power >= 0: return Fraction(a._numerator ** power, a._denominator ** power) else: return Fraction(a._denominator ** -power, a._numerator ** -power) else: # A fractional power will generally produce an # irrational number. return float(a) ** float(b) else: return float(a) ** b def __rpow__(b, a): """a ** b""" if b._denominator == 1 and b._numerator >= 0: # If a is an int, keep it that way if possible. 
return a ** b._numerator if isinstance(a, numbers.Rational): return Fraction(a.numerator, a.denominator) ** b if b._denominator == 1: return a ** b._numerator return a ** float(b) def __pos__(a): """+a: Coerces a subclass instance to Fraction""" return Fraction(a._numerator, a._denominator) def __neg__(a): """-a""" return Fraction(-a._numerator, a._denominator) def __abs__(a): """abs(a)""" return Fraction(abs(a._numerator), a._denominator) def __trunc__(a): """trunc(a)""" if a._numerator < 0: return -(-a._numerator // a._denominator) else: return a._numerator // a._denominator def __floor__(a): """Will be math.floor(a) in 3.0.""" return a.numerator // a.denominator def __ceil__(a): """Will be math.ceil(a) in 3.0.""" # The negations cleverly convince floordiv to return the ceiling. return -(-a.numerator // a.denominator) def __round__(self, ndigits=None): """Will be round(self, ndigits) in 3.0. Rounds half toward even. """ if ndigits is None: floor, remainder = divmod(self.numerator, self.denominator) if remainder * 2 < self.denominator: return floor elif remainder * 2 > self.denominator: return floor + 1 # Deal with the half case: elif floor % 2 == 0: return floor else: return floor + 1 shift = 10**abs(ndigits) # See _operator_fallbacks.forward to check that the results of # these operations will always be Fraction and therefore have # round(). if ndigits > 0: return Fraction(round(self * shift), shift) else: return Fraction(round(self / shift) * shift) def __hash__(self): """hash(self)""" # XXX since this method is expensive, consider caching the result # In order to make sure that the hash of a Fraction agrees # with the hash of a numerically equal integer, float or # Decimal instance, we follow the rules for numeric hashes # outlined in the documentation. (See library docs, 'Built-in # Types'). # dinv is the inverse of self._denominator modulo the prime # _PyHASH_MODULUS, or 0 if self._denominator is divisible by # _PyHASH_MODULUS. dinv = pow(self._denominator, _PyHASH_MODULUS - 2, _PyHASH_MODULUS) if not dinv: hash_ = _PyHASH_INF else: hash_ = abs(self._numerator) * dinv % _PyHASH_MODULUS result = hash_ if self >= 0 else -hash_ return -2 if result == -1 else result def __eq__(a, b): """a == b""" if isinstance(b, numbers.Rational): return (a._numerator == b.numerator and a._denominator == b.denominator) if isinstance(b, numbers.Complex) and b.imag == 0: b = b.real if isinstance(b, float): if math.isnan(b) or math.isinf(b): # comparisons with an infinity or nan should behave in # the same way for any finite a, so treat a as zero. return 0.0 == b else: return a == a.from_float(b) else: # Since a doesn't know how to compare with b, let's give b # a chance to compare itself with a. return NotImplemented def _richcmp(self, other, op): """Helper for comparison operators, for internal use only. Implement comparison between a Rational instance `self`, and either another Rational instance or a float `other`. If `other` is not a Rational instance or a float, return NotImplemented. `op` should be one of the six standard comparison operators. """ # convert other to a Rational instance where reasonable. 
if isinstance(other, numbers.Rational): return op(self._numerator * other.denominator, self._denominator * other.numerator) if isinstance(other, float): if math.isnan(other) or math.isinf(other): return op(0.0, other) else: return op(self, self.from_float(other)) else: return NotImplemented def __lt__(a, b): """a < b""" return a._richcmp(b, operator.lt) def __gt__(a, b): """a > b""" return a._richcmp(b, operator.gt) def __le__(a, b): """a <= b""" return a._richcmp(b, operator.le) def __ge__(a, b): """a >= b""" return a._richcmp(b, operator.ge) def __bool__(a): """a != 0""" return a._numerator != 0 # support for pickling, copy, and deepcopy def __reduce__(self): return (self.__class__, (str(self),)) def __copy__(self): if type(self) == Fraction: return self # I'm immutable; therefore I am my own clone return self.__class__(self._numerator, self._denominator) def __deepcopy__(self, memo): if type(self) == Fraction: return self # My components are also immutable return self.__class__(self._numerator, self._denominator)
gpl-3.0
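The continued-fraction machinery behind limit_denominator, and the from_float caveat its docstring warns about, can be exercised directly with the standard-library fractions module (expected outputs shown in comments):

from fractions import Fraction

# Best rational approximations are convergents of the continued fraction:
print(Fraction('3.141592653589793').limit_denominator(100))   # -> 311/99
print(Fraction('3.141592653589793').limit_denominator(1000))  # -> 355/113

# Binary floats convert exactly, so from_float(0.3) != Fraction(3, 10):
print(Fraction.from_float(0.3))  # -> 5404319552844595/18014398509481984
# limit_denominator recovers the "intended" decimal value:
print(Fraction.from_float(0.3).limit_denominator(10))  # -> 3/10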
Psycojoker/wanawana
wanawana/settings.py
1
2687
""" Django settings for wanawana project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'w2=4yi@cyc@vsio@$tvz$%&_po6si@533=cwh5kr2dk#pd69)v' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'bootstrap3', 'django_extensions', 'debug_toolbar', 'django_pdb', 'wanawana', 'users', 'events', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django_pdb.middleware.PdbMiddleware', ) ROOT_URLCONF = 'wanawana.urls' TEMPLATE_LOADERS = ( 'hamlpy.template.loaders.HamlPyFilesystemLoader', 'hamlpy.template.loaders.HamlPyAppDirectoriesLoader', 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ) WSGI_APPLICATION = 'wanawana.wsgi.application' # Email configuration EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' if DEBUG: EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/' try: from settings_local import * except ImportError: pass
gpl-3.0
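The trailing try/except import in the settings module above implements the usual local-override pattern: an optional settings_local.py placed beside settings.py wins over the defaults via the wildcard import. A hypothetical override file (host and database names invented for illustration):

# settings_local.py (hypothetical) -- anything defined here shadows the
# defaults in settings.py, because it is star-imported last.
DEBUG = False
ALLOWED_HOSTS = ['wanawana.example.org']
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'wanawana',
    }
}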
bertucho/epic-movie-quotes-quiz
dialogos/build/Twisted/twisted/protocols/sip.py
8
42262
# -*- test-case-name: twisted.test.test_sip -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """Session Initialization Protocol. Documented in RFC 2543. [Superseded by 3261] This module contains a deprecated implementation of HTTP Digest authentication. See L{twisted.cred.credentials} and L{twisted.cred._digest} for its new home. """ # system imports import socket, time, sys, random, warnings from hashlib import md5 from zope.interface import implements, Interface # twisted imports from twisted.python import log, util from twisted.python.deprecate import deprecated from twisted.python.versions import Version from twisted.internet import protocol, defer, reactor from twisted import cred from twisted.cred.credentials import UsernameHashedPassword, UsernamePassword # sibling imports from twisted.protocols import basic PORT = 5060 # SIP headers have short forms shortHeaders = {"call-id": "i", "contact": "m", "content-encoding": "e", "content-length": "l", "content-type": "c", "from": "f", "subject": "s", "to": "t", "via": "v", } longHeaders = {} for k, v in shortHeaders.items(): longHeaders[v] = k del k, v statusCodes = { 100: "Trying", 180: "Ringing", 181: "Call Is Being Forwarded", 182: "Queued", 183: "Session Progress", 200: "OK", 300: "Multiple Choices", 301: "Moved Permanently", 302: "Moved Temporarily", 303: "See Other", 305: "Use Proxy", 380: "Alternative Service", 400: "Bad Request", 401: "Unauthorized", 402: "Payment Required", 403: "Forbidden", 404: "Not Found", 405: "Method Not Allowed", 406: "Not Acceptable", 407: "Proxy Authentication Required", 408: "Request Timeout", 409: "Conflict", # Not in RFC3261 410: "Gone", 411: "Length Required", # Not in RFC3261 413: "Request Entity Too Large", 414: "Request-URI Too Large", 415: "Unsupported Media Type", 416: "Unsupported URI Scheme", 420: "Bad Extension", 421: "Extension Required", 423: "Interval Too Brief", 480: "Temporarily Unavailable", 481: "Call/Transaction Does Not Exist", 482: "Loop Detected", 483: "Too Many Hops", 484: "Address Incomplete", 485: "Ambiguous", 486: "Busy Here", 487: "Request Terminated", 488: "Not Acceptable Here", 491: "Request Pending", 493: "Undecipherable", 500: "Internal Server Error", 501: "Not Implemented", 502: "Bad Gateway", # no donut 503: "Service Unavailable", 504: "Server Time-out", 505: "SIP Version not supported", 513: "Message Too Large", 600: "Busy Everywhere", 603: "Decline", 604: "Does not exist anywhere", 606: "Not Acceptable", } specialCases = { 'cseq': 'CSeq', 'call-id': 'Call-ID', 'www-authenticate': 'WWW-Authenticate', } def dashCapitalize(s): ''' Capitalize a string, making sure to treat - as a word separator ''' return '-'.join([ x.capitalize() for x in s.split('-')]) def unq(s): if s[0] == s[-1] == '"': return s[1:-1] return s def DigestCalcHA1( pszAlg, pszUserName, pszRealm, pszPassword, pszNonce, pszCNonce, ): m = md5() m.update(pszUserName) m.update(":") m.update(pszRealm) m.update(":") m.update(pszPassword) HA1 = m.digest() if pszAlg == "md5-sess": m = md5() m.update(HA1) m.update(":") m.update(pszNonce) m.update(":") m.update(pszCNonce) HA1 = m.digest() return HA1.encode('hex') DigestCalcHA1 = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcHA1) def DigestCalcResponse( HA1, pszNonce, pszNonceCount, pszCNonce, pszQop, pszMethod, pszDigestUri, pszHEntity, ): m = md5() m.update(pszMethod) m.update(":") m.update(pszDigestUri) if pszQop == "auth-int": m.update(":") m.update(pszHEntity) HA2 = m.digest().encode('hex') m = md5() m.update(HA1) m.update(":") 
m.update(pszNonce) m.update(":") if pszNonceCount and pszCNonce: # pszQop: m.update(pszNonceCount) m.update(":") m.update(pszCNonce) m.update(":") m.update(pszQop) m.update(":") m.update(HA2) hash = m.digest().encode('hex') return hash DigestCalcResponse = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcResponse) _absent = object() class Via(object): """ A L{Via} is a SIP Via header, representing a segment of the path taken by the request. See RFC 3261, sections 8.1.1.7, 18.2.2, and 20.42. @ivar transport: Network protocol used for this leg. (Probably either "TCP" or "UDP".) @type transport: C{str} @ivar branch: Unique identifier for this request. @type branch: C{str} @ivar host: Hostname or IP for this leg. @type host: C{str} @ivar port: Port used for this leg. @type port C{int}, or None. @ivar rportRequested: Whether to request RFC 3581 client processing or not. @type rportRequested: C{bool} @ivar rportValue: Servers wishing to honor requests for RFC 3581 processing should set this parameter to the source port the request was received from. @type rportValue: C{int}, or None. @ivar ttl: Time-to-live for requests on multicast paths. @type ttl: C{int}, or None. @ivar maddr: The destination multicast address, if any. @type maddr: C{str}, or None. @ivar hidden: Obsolete in SIP 2.0. @type hidden: C{bool} @ivar otherParams: Any other parameters in the header. @type otherParams: C{dict} """ def __init__(self, host, port=PORT, transport="UDP", ttl=None, hidden=False, received=None, rport=_absent, branch=None, maddr=None, **kw): """ Set parameters of this Via header. All arguments correspond to attributes of the same name. To maintain compatibility with old SIP code, the 'rport' argument is used to determine the values of C{rportRequested} and C{rportValue}. If None, C{rportRequested} is set to True. (The deprecated method for doing this is to pass True.) If an integer, C{rportValue} is set to the given value. Any arguments not explicitly named here are collected into the C{otherParams} dict. """ self.transport = transport self.host = host self.port = port self.ttl = ttl self.hidden = hidden self.received = received if rport is True: warnings.warn( "rport=True is deprecated since Twisted 9.0.", DeprecationWarning, stacklevel=2) self.rportValue = None self.rportRequested = True elif rport is None: self.rportValue = None self.rportRequested = True elif rport is _absent: self.rportValue = None self.rportRequested = False else: self.rportValue = rport self.rportRequested = False self.branch = branch self.maddr = maddr self.otherParams = kw def _getrport(self): """ Returns the rport value expected by the old SIP code. """ if self.rportRequested == True: return True elif self.rportValue is not None: return self.rportValue else: return None def _setrport(self, newRPort): """ L{Base._fixupNAT} sets C{rport} directly, so this method sets C{rportValue} based on that. @param newRPort: The new rport value. @type newRPort: C{int} """ self.rportValue = newRPort self.rportRequested = False rport = property(_getrport, _setrport) def toString(self): """ Serialize this header for use in a request or response. 
""" s = "SIP/2.0/%s %s:%s" % (self.transport, self.host, self.port) if self.hidden: s += ";hidden" for n in "ttl", "branch", "maddr", "received": value = getattr(self, n) if value is not None: s += ";%s=%s" % (n, value) if self.rportRequested: s += ";rport" elif self.rportValue is not None: s += ";rport=%s" % (self.rport,) etc = self.otherParams.items() etc.sort() for k, v in etc: if v is None: s += ";" + k else: s += ";%s=%s" % (k, v) return s def parseViaHeader(value): """ Parse a Via header. @return: The parsed version of this header. @rtype: L{Via} """ parts = value.split(";") sent, params = parts[0], parts[1:] protocolinfo, by = sent.split(" ", 1) by = by.strip() result = {} pname, pversion, transport = protocolinfo.split("/") if pname != "SIP" or pversion != "2.0": raise ValueError, "wrong protocol or version: %r" % value result["transport"] = transport if ":" in by: host, port = by.split(":") result["port"] = int(port) result["host"] = host else: result["host"] = by for p in params: # it's the comment-striping dance! p = p.strip().split(" ", 1) if len(p) == 1: p, comment = p[0], "" else: p, comment = p if p == "hidden": result["hidden"] = True continue parts = p.split("=", 1) if len(parts) == 1: name, value = parts[0], None else: name, value = parts if name in ("rport", "ttl"): value = int(value) result[name] = value return Via(**result) class URL: """A SIP URL.""" def __init__(self, host, username=None, password=None, port=None, transport=None, usertype=None, method=None, ttl=None, maddr=None, tag=None, other=None, headers=None): self.username = username self.host = host self.password = password self.port = port self.transport = transport self.usertype = usertype self.method = method self.tag = tag self.ttl = ttl self.maddr = maddr if other == None: self.other = [] else: self.other = other if headers == None: self.headers = {} else: self.headers = headers def toString(self): l = []; w = l.append w("sip:") if self.username != None: w(self.username) if self.password != None: w(":%s" % self.password) w("@") w(self.host) if self.port != None: w(":%d" % self.port) if self.usertype != None: w(";user=%s" % self.usertype) for n in ("transport", "ttl", "maddr", "method", "tag"): v = getattr(self, n) if v != None: w(";%s=%s" % (n, v)) for v in self.other: w(";%s" % v) if self.headers: w("?") w("&".join([("%s=%s" % (specialCases.get(h) or dashCapitalize(h), v)) for (h, v) in self.headers.items()])) return "".join(l) def __str__(self): return self.toString() def __repr__(self): return '<URL %s:%s@%s:%r/%s>' % (self.username, self.password, self.host, self.port, self.transport) def parseURL(url, host=None, port=None): """Return string into URL object. URIs are of form 'sip:[email protected]'. """ d = {} if not url.startswith("sip:"): raise ValueError("unsupported scheme: " + url[:4]) parts = url[4:].split(";") userdomain, params = parts[0], parts[1:] udparts = userdomain.split("@", 1) if len(udparts) == 2: userpass, hostport = udparts upparts = userpass.split(":", 1) if len(upparts) == 1: d["username"] = upparts[0] else: d["username"] = upparts[0] d["password"] = upparts[1] else: hostport = udparts[0] hpparts = hostport.split(":", 1) if len(hpparts) == 1: d["host"] = hpparts[0] else: d["host"] = hpparts[0] d["port"] = int(hpparts[1]) if host != None: d["host"] = host if port != None: d["port"] = port for p in params: if p == params[-1] and "?" 
in p: d["headers"] = h = {} p, headers = p.split("?", 1) for header in headers.split("&"): k, v = header.split("=") h[k] = v nv = p.split("=", 1) if len(nv) == 1: d.setdefault("other", []).append(p) continue name, value = nv if name == "user": d["usertype"] = value elif name in ("transport", "ttl", "maddr", "method", "tag"): if name == "ttl": value = int(value) d[name] = value else: d.setdefault("other", []).append(p) return URL(**d) def cleanRequestURL(url): """Clean a URL from a Request line.""" url.transport = None url.maddr = None url.ttl = None url.headers = {} def parseAddress(address, host=None, port=None, clean=0): """Return (name, uri, params) for From/To/Contact header. @param clean: remove unnecessary info, usually for From and To headers. """ address = address.strip() # simple 'sip:foo' case if address.startswith("sip:"): return "", parseURL(address, host=host, port=port), {} params = {} name, url = address.split("<", 1) name = name.strip() if name.startswith('"'): name = name[1:] if name.endswith('"'): name = name[:-1] url, paramstring = url.split(">", 1) url = parseURL(url, host=host, port=port) paramstring = paramstring.strip() if paramstring: for l in paramstring.split(";"): if not l: continue k, v = l.split("=") params[k] = v if clean: # rfc 2543 6.21 url.ttl = None url.headers = {} url.transport = None url.maddr = None return name, url, params class SIPError(Exception): def __init__(self, code, phrase=None): if phrase is None: phrase = statusCodes[code] Exception.__init__(self, "SIP error (%d): %s" % (code, phrase)) self.code = code self.phrase = phrase class RegistrationError(SIPError): """Registration was not possible.""" class Message: """A SIP message.""" length = None def __init__(self): self.headers = util.OrderedDict() # map name to list of values self.body = "" self.finished = 0 def addHeader(self, name, value): name = name.lower() name = longHeaders.get(name, name) if name == "content-length": self.length = int(value) self.headers.setdefault(name,[]).append(value) def bodyDataReceived(self, data): self.body += data def creationFinished(self): if (self.length != None) and (self.length != len(self.body)): raise ValueError, "wrong body length" self.finished = 1 def toString(self): s = "%s\r\n" % self._getHeaderLine() for n, vs in self.headers.items(): for v in vs: s += "%s: %s\r\n" % (specialCases.get(n) or dashCapitalize(n), v) s += "\r\n" s += self.body return s def _getHeaderLine(self): raise NotImplementedError class Request(Message): """A Request for a URI""" def __init__(self, method, uri, version="SIP/2.0"): Message.__init__(self) self.method = method if isinstance(uri, URL): self.uri = uri else: self.uri = parseURL(uri) cleanRequestURL(self.uri) def __repr__(self): return "<SIP Request %d:%s %s>" % (id(self), self.method, self.uri.toString()) def _getHeaderLine(self): return "%s %s SIP/2.0" % (self.method, self.uri.toString()) class Response(Message): """A Response to a URI Request""" def __init__(self, code, phrase=None, version="SIP/2.0"): Message.__init__(self) self.code = code if phrase == None: phrase = statusCodes[code] self.phrase = phrase def __repr__(self): return "<SIP Response %d:%s>" % (id(self), self.code) def _getHeaderLine(self): return "SIP/2.0 %s %s" % (self.code, self.phrase) class MessagesParser(basic.LineReceiver): """A SIP messages parser. Expects dataReceived, dataDone repeatedly, in that order. Shouldn't be connected to actual transport. 
""" version = "SIP/2.0" acceptResponses = 1 acceptRequests = 1 state = "firstline" # or "headers", "body" or "invalid" debug = 0 def __init__(self, messageReceivedCallback): self.messageReceived = messageReceivedCallback self.reset() def reset(self, remainingData=""): self.state = "firstline" self.length = None # body length self.bodyReceived = 0 # how much of the body we received self.message = None self.header = None self.setLineMode(remainingData) def invalidMessage(self): self.state = "invalid" self.setRawMode() def dataDone(self): # clear out any buffered data that may be hanging around self.clearLineBuffer() if self.state == "firstline": return if self.state != "body": self.reset() return if self.length == None: # no content-length header, so end of data signals message done self.messageDone() elif self.length < self.bodyReceived: # aborted in the middle self.reset() else: # we have enough data and message wasn't finished? something is wrong raise RuntimeError, "this should never happen" def dataReceived(self, data): try: basic.LineReceiver.dataReceived(self, data) except: log.err() self.invalidMessage() def handleFirstLine(self, line): """Expected to create self.message.""" raise NotImplementedError def lineLengthExceeded(self, line): self.invalidMessage() def lineReceived(self, line): if self.state == "firstline": while line.startswith("\n") or line.startswith("\r"): line = line[1:] if not line: return try: a, b, c = line.split(" ", 2) except ValueError: self.invalidMessage() return if a == "SIP/2.0" and self.acceptResponses: # response try: code = int(b) except ValueError: self.invalidMessage() return self.message = Response(code, c) elif c == "SIP/2.0" and self.acceptRequests: self.message = Request(a, b) else: self.invalidMessage() return self.state = "headers" return else: assert self.state == "headers" if line: # multiline header if line.startswith(" ") or line.startswith("\t"): name, value = self.header self.header = name, (value + line.lstrip()) else: # new header if self.header: self.message.addHeader(*self.header) self.header = None try: name, value = line.split(":", 1) except ValueError: self.invalidMessage() return self.header = name, value.lstrip() # XXX we assume content-length won't be multiline if name.lower() == "content-length": try: self.length = int(value.lstrip()) except ValueError: self.invalidMessage() return else: # CRLF, we now have message body until self.length bytes, # or if no length was given, until there is no more data # from the connection sending us data. 
self.state = "body" if self.header: self.message.addHeader(*self.header) self.header = None if self.length == 0: self.messageDone() return self.setRawMode() def messageDone(self, remainingData=""): assert self.state == "body" self.message.creationFinished() self.messageReceived(self.message) self.reset(remainingData) def rawDataReceived(self, data): assert self.state in ("body", "invalid") if self.state == "invalid": return if self.length == None: self.message.bodyDataReceived(data) else: dataLen = len(data) expectedLen = self.length - self.bodyReceived if dataLen > expectedLen: self.message.bodyDataReceived(data[:expectedLen]) self.messageDone(data[expectedLen:]) return else: self.bodyReceived += dataLen self.message.bodyDataReceived(data) if self.bodyReceived == self.length: self.messageDone() class Base(protocol.DatagramProtocol): """Base class for SIP clients and servers.""" PORT = PORT debug = False def __init__(self): self.messages = [] self.parser = MessagesParser(self.addMessage) def addMessage(self, msg): self.messages.append(msg) def datagramReceived(self, data, addr): self.parser.dataReceived(data) self.parser.dataDone() for m in self.messages: self._fixupNAT(m, addr) if self.debug: log.msg("Received %r from %r" % (m.toString(), addr)) if isinstance(m, Request): self.handle_request(m, addr) else: self.handle_response(m, addr) self.messages[:] = [] def _fixupNAT(self, message, (srcHost, srcPort)): # RFC 2543 6.40.2, senderVia = parseViaHeader(message.headers["via"][0]) if senderVia.host != srcHost: senderVia.received = srcHost if senderVia.port != srcPort: senderVia.rport = srcPort message.headers["via"][0] = senderVia.toString() elif senderVia.rport == True: senderVia.received = srcHost senderVia.rport = srcPort message.headers["via"][0] = senderVia.toString() def deliverResponse(self, responseMessage): """Deliver response. Destination is based on topmost Via header.""" destVia = parseViaHeader(responseMessage.headers["via"][0]) # XXX we don't do multicast yet host = destVia.received or destVia.host port = destVia.rport or destVia.port or self.PORT destAddr = URL(host=host, port=port) self.sendMessage(destAddr, responseMessage) def responseFromRequest(self, code, request): """Create a response to a request message.""" response = Response(code) for name in ("via", "to", "from", "call-id", "cseq"): response.headers[name] = request.headers.get(name, [])[:] return response def sendMessage(self, destURL, message): """Send a message. @param destURL: C{URL}. This should be a *physical* URL, not a logical one. @param message: The message to send. """ if destURL.transport not in ("udp", None): raise RuntimeError, "only UDP currently supported" if self.debug: log.msg("Sending %r to %r" % (message.toString(), destURL)) self.transport.write(message.toString(), (destURL.host, destURL.port or self.PORT)) def handle_request(self, message, addr): """Override to define behavior for requests received @type message: C{Message} @type addr: C{tuple} """ raise NotImplementedError def handle_response(self, message, addr): """Override to define behavior for responses received. 
@type message: C{Message} @type addr: C{tuple} """ raise NotImplementedError class IContact(Interface): """A user of a registrar or proxy""" class Registration: def __init__(self, secondsToExpiry, contactURL): self.secondsToExpiry = secondsToExpiry self.contactURL = contactURL class IRegistry(Interface): """Allows registration of logical->physical URL mapping.""" def registerAddress(domainURL, logicalURL, physicalURL): """Register the physical address of a logical URL. @return: Deferred of C{Registration} or failure with RegistrationError. """ def unregisterAddress(domainURL, logicalURL, physicalURL): """Unregister the physical address of a logical URL. @return: Deferred of C{Registration} or failure with RegistrationError. """ def getRegistrationInfo(logicalURL): """Get registration info for logical URL. @return: Deferred of C{Registration} object or failure of LookupError. """ class ILocator(Interface): """Allow looking up physical address for logical URL.""" def getAddress(logicalURL): """Return physical URL of server for logical URL of user. @param logicalURL: a logical C{URL}. @return: Deferred which becomes URL or fails with LookupError. """ class Proxy(Base): """SIP proxy.""" PORT = PORT locator = None # object implementing ILocator def __init__(self, host=None, port=PORT): """Create new instance. @param host: our hostname/IP as set in Via headers. @param port: our port as set in Via headers. """ self.host = host or socket.getfqdn() self.port = port Base.__init__(self) def getVia(self): """Return value of Via header for this proxy.""" return Via(host=self.host, port=self.port) def handle_request(self, message, addr): # send immediate 100/trying message before processing #self.deliverResponse(self.responseFromRequest(100, message)) f = getattr(self, "handle_%s_request" % message.method, None) if f is None: f = self.handle_request_default try: d = f(message, addr) except SIPError, e: self.deliverResponse(self.responseFromRequest(e.code, message)) except: log.err() self.deliverResponse(self.responseFromRequest(500, message)) else: if d is not None: d.addErrback(lambda e: self.deliverResponse(self.responseFromRequest(e.code, message)) ) def handle_request_default(self, message, (srcHost, srcPort)): """Default request handler. Default behaviour for OPTIONS and unknown methods for proxies is to forward message on to the client. Since at the moment we are stateless proxy, that's basically everything. """ def _mungContactHeader(uri, message): message.headers['contact'][0] = uri.toString() return self.sendMessage(uri, message) viaHeader = self.getVia() if viaHeader.toString() in message.headers["via"]: # must be a loop, so drop message log.msg("Dropping looped message.") return message.headers["via"].insert(0, viaHeader.toString()) name, uri, tags = parseAddress(message.headers["to"][0], clean=1) # this is broken and needs refactoring to use cred d = self.locator.getAddress(uri) d.addCallback(self.sendMessage, message) d.addErrback(self._cantForwardRequest, message) def _cantForwardRequest(self, error, message): error.trap(LookupError) del message.headers["via"][0] # this'll be us self.deliverResponse(self.responseFromRequest(404, message)) def deliverResponse(self, responseMessage): """Deliver response. 
Destination is based on topmost Via header.""" destVia = parseViaHeader(responseMessage.headers["via"][0]) # XXX we don't do multicast yet host = destVia.received or destVia.host port = destVia.rport or destVia.port or self.PORT destAddr = URL(host=host, port=port) self.sendMessage(destAddr, responseMessage) def responseFromRequest(self, code, request): """Create a response to a request message.""" response = Response(code) for name in ("via", "to", "from", "call-id", "cseq"): response.headers[name] = request.headers.get(name, [])[:] return response def handle_response(self, message, addr): """Default response handler.""" v = parseViaHeader(message.headers["via"][0]) if (v.host, v.port) != (self.host, self.port): # we got a message not intended for us? # XXX note this check breaks if we have multiple external IPs # yay for suck protocols log.msg("Dropping incorrectly addressed message") return del message.headers["via"][0] if not message.headers["via"]: # this message is addressed to us self.gotResponse(message, addr) return self.deliverResponse(message) def gotResponse(self, message, addr): """Called with responses that are addressed at this server.""" pass class IAuthorizer(Interface): def getChallenge(peer): """Generate a challenge the client may respond to. @type peer: C{tuple} @param peer: The client's address @rtype: C{str} @return: The challenge string """ def decode(response): """Create a credentials object from the given response. @type response: C{str} """ class BasicAuthorizer: """Authorizer for insecure Basic (base64-encoded plaintext) authentication. This form of authentication is broken and insecure. Do not use it. """ implements(IAuthorizer) def __init__(self): """ This method exists solely to issue a deprecation warning. """ warnings.warn( "twisted.protocols.sip.BasicAuthorizer was deprecated " "in Twisted 9.0.0", category=DeprecationWarning, stacklevel=2) def getChallenge(self, peer): return None def decode(self, response): # At least one SIP client improperly pads its Base64 encoded messages for i in range(3): try: creds = (response + ('=' * i)).decode('base64') except: pass else: break else: # Totally bogus raise SIPError(400) p = creds.split(':', 1) if len(p) == 2: return UsernamePassword(*p) raise SIPError(400) class DigestedCredentials(UsernameHashedPassword): """Yet Another Simple Digest-MD5 authentication scheme""" def __init__(self, username, fields, challenges): warnings.warn( "twisted.protocols.sip.DigestedCredentials was deprecated " "in Twisted 9.0.0", category=DeprecationWarning, stacklevel=2) self.username = username self.fields = fields self.challenges = challenges def checkPassword(self, password): method = 'REGISTER' response = self.fields.get('response') uri = self.fields.get('uri') nonce = self.fields.get('nonce') cnonce = self.fields.get('cnonce') nc = self.fields.get('nc') algo = self.fields.get('algorithm', 'MD5') qop = self.fields.get('qop-options', 'auth') opaque = self.fields.get('opaque') if opaque not in self.challenges: return False del self.challenges[opaque] user, domain = self.username.split('@', 1) if uri is None: uri = 'sip:' + domain expected = DigestCalcResponse( DigestCalcHA1(algo, user, domain, password, nonce, cnonce), nonce, nc, cnonce, qop, method, uri, None, ) return expected == response class DigestAuthorizer: CHALLENGE_LIFETIME = 15 implements(IAuthorizer) def __init__(self): warnings.warn( "twisted.protocols.sip.DigestAuthorizer was deprecated " "in Twisted 9.0.0", category=DeprecationWarning, stacklevel=2) self.outstanding = {} 
def generateNonce(self): c = tuple([random.randrange(sys.maxint) for _ in range(3)]) c = '%d%d%d' % c return c def generateOpaque(self): return str(random.randrange(sys.maxint)) def getChallenge(self, peer): c = self.generateNonce() o = self.generateOpaque() self.outstanding[o] = c return ','.join(( 'nonce="%s"' % c, 'opaque="%s"' % o, 'qop-options="auth"', 'algorithm="MD5"', )) def decode(self, response): response = ' '.join(response.splitlines()) parts = response.split(',') auth = dict([(k.strip(), unq(v.strip())) for (k, v) in [p.split('=', 1) for p in parts]]) try: username = auth['username'] except KeyError: raise SIPError(401) try: return DigestedCredentials(username, auth, self.outstanding) except: raise SIPError(400) class RegisterProxy(Proxy): """A proxy that allows registration for a specific domain. Unregistered users won't be handled. """ portal = None registry = None # should implement IRegistry authorizers = {} def __init__(self, *args, **kw): Proxy.__init__(self, *args, **kw) self.liveChallenges = {} if "digest" not in self.authorizers: self.authorizers["digest"] = DigestAuthorizer() def handle_ACK_request(self, message, (host, port)): # XXX # ACKs are a client's way of indicating they got the last message # Responding to them is not a good idea. # However, we should keep track of terminal messages and re-transmit # if no ACK is received. pass def handle_REGISTER_request(self, message, (host, port)): """Handle a registration request. Currently registration is not proxied. """ if self.portal is None: # There is no portal. Let anyone in. self.register(message, host, port) else: # There is a portal. Check for credentials. if not message.headers.has_key("authorization"): return self.unauthorized(message, host, port) else: return self.login(message, host, port) def unauthorized(self, message, host, port): m = self.responseFromRequest(401, message) for (scheme, auth) in self.authorizers.iteritems(): chal = auth.getChallenge((host, port)) if chal is None: value = '%s realm="%s"' % (scheme.title(), self.host) else: value = '%s %s,realm="%s"' % (scheme.title(), chal, self.host) m.headers.setdefault('www-authenticate', []).append(value) self.deliverResponse(m) def login(self, message, host, port): parts = message.headers['authorization'][0].split(None, 1) a = self.authorizers.get(parts[0].lower()) if a: try: c = a.decode(parts[1]) except SIPError: raise except: log.err() self.deliverResponse(self.responseFromRequest(500, message)) else: c.username += '@' + self.host self.portal.login(c, None, IContact ).addCallback(self._cbLogin, message, host, port ).addErrback(self._ebLogin, message, host, port ).addErrback(log.err ) else: self.deliverResponse(self.responseFromRequest(501, message)) def _cbLogin(self, (i, a, l), message, host, port): # It's stateless, matey. What a joke. self.register(message, host, port) def _ebLogin(self, failure, message, host, port): failure.trap(cred.error.UnauthorizedLogin) self.unauthorized(message, host, port) def register(self, message, host, port): """Allow all users to register""" name, toURL, params = parseAddress(message.headers["to"][0], clean=1) contact = None if message.headers.has_key("contact"): contact = message.headers["contact"][0] if message.headers.get("expires", [None])[0] == "0": self.unregister(message, toURL, contact) else: # XXX Check expires on appropriate URL, and pass it to registry # instead of having registry hardcode it. 
if contact is not None: name, contactURL, params = parseAddress(contact, host=host, port=port) d = self.registry.registerAddress(message.uri, toURL, contactURL) else: d = self.registry.getRegistrationInfo(toURL) d.addCallbacks(self._cbRegister, self._ebRegister, callbackArgs=(message,), errbackArgs=(message,) ) def _cbRegister(self, registration, message): response = self.responseFromRequest(200, message) if registration.contactURL != None: response.addHeader("contact", registration.contactURL.toString()) response.addHeader("expires", "%d" % registration.secondsToExpiry) response.addHeader("content-length", "0") self.deliverResponse(response) def _ebRegister(self, error, message): error.trap(RegistrationError, LookupError) # XXX return error message, and alter tests to deal with # this, currently tests assume no message sent on failure def unregister(self, message, toURL, contact): try: expires = int(message.headers["expires"][0]) except ValueError: self.deliverResponse(self.responseFromRequest(400, message)) else: if expires == 0: if contact == "*": contactURL = "*" else: name, contactURL, params = parseAddress(contact) d = self.registry.unregisterAddress(message.uri, toURL, contactURL) d.addCallback(self._cbUnregister, message ).addErrback(self._ebUnregister, message ) def _cbUnregister(self, registration, message): msg = self.responseFromRequest(200, message) msg.headers.setdefault('contact', []).append(registration.contactURL.toString()) msg.addHeader("expires", "0") self.deliverResponse(msg) def _ebUnregister(self, registration, message): pass class InMemoryRegistry: """A simplistic registry for a specific domain.""" implements(IRegistry, ILocator) def __init__(self, domain): self.domain = domain # the domain we handle registration for self.users = {} # map username to (IDelayedCall for expiry, address URI) def getAddress(self, userURI): if userURI.host != self.domain: return defer.fail(LookupError("unknown domain")) if userURI.username in self.users: dc, url = self.users[userURI.username] return defer.succeed(url) else: return defer.fail(LookupError("no such user")) def getRegistrationInfo(self, userURI): if userURI.host != self.domain: return defer.fail(LookupError("unknown domain")) if self.users.has_key(userURI.username): dc, url = self.users[userURI.username] return defer.succeed(Registration(int(dc.getTime() - time.time()), url)) else: return defer.fail(LookupError("no such user")) def _expireRegistration(self, username): try: dc, url = self.users[username] except KeyError: return defer.fail(LookupError("no such user")) else: dc.cancel() del self.users[username] return defer.succeed(Registration(0, url)) def registerAddress(self, domainURL, logicalURL, physicalURL): if domainURL.host != self.domain: log.msg("Registration for domain we don't handle.") return defer.fail(RegistrationError(404)) if logicalURL.host != self.domain: log.msg("Registration for domain we don't handle.") return defer.fail(RegistrationError(404)) if logicalURL.username in self.users: dc, old = self.users[logicalURL.username] dc.reset(3600) else: dc = reactor.callLater(3600, self._expireRegistration, logicalURL.username) log.msg("Registered %s at %s" % (logicalURL.toString(), physicalURL.toString())) self.users[logicalURL.username] = (dc, physicalURL) return defer.succeed(Registration(int(dc.getTime() - time.time()), physicalURL)) def unregisterAddress(self, domainURL, logicalURL, physicalURL): return self._expireRegistration(logicalURL.username)
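The deprecated DigestCalcHA1/DigestCalcResponse pair above reduces to two chained MD5 digests (RFC 2617 with qop). A self-contained recomputation with hashlib, using made-up test vectors rather than real credentials:

from hashlib import md5

# All field values below are invented examples.
user, realm, password = 'alice', 'example.com', 'secret'
nonce, cnonce, nc, qop = 'abc123', 'xyz789', '00000001', 'auth'
method, uri = 'REGISTER', 'sip:example.com'

# HA1 = MD5(user:realm:password), hex-encoded (non "md5-sess" branch above)
ha1 = md5(('%s:%s:%s' % (user, realm, password)).encode()).hexdigest()
# HA2 = MD5(method:uri)
ha2 = md5(('%s:%s' % (method, uri)).encode()).hexdigest()
# response = MD5(HA1:nonce:nc:cnonce:qop:HA2), as in DigestCalcResponse
response = md5(':'.join([ha1, nonce, nc, cnonce, qop, ha2]).encode()).hexdigest()
print(response)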
mit
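A quick round-trip through the Via helpers defined in the module above (the branch token is an arbitrary RFC 3261-style value; assumes twisted.protocols.sip is importable):

from twisted.protocols.sip import parseViaHeader

v = parseViaHeader('SIP/2.0/UDP pc33.example.com:5060;branch=z9hG4bK776asdhds')
print('%s %s:%d %s' % (v.transport, v.host, v.port, v.branch))
# -> UDP pc33.example.com:5060 z9hG4bK776asdhds
print(v.toString())
# -> SIP/2.0/UDP pc33.example.com:5060;branch=z9hG4bK776asdhds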
pombredanne/tahoe-lafs
src/allmydata/mutable/filenode.py
2
46275
import random from zope.interface import implements from twisted.internet import defer, reactor from foolscap.api import eventually from allmydata.interfaces import IMutableFileNode, ICheckable, ICheckResults, \ NotEnoughSharesError, MDMF_VERSION, SDMF_VERSION, IMutableUploadable, \ IMutableFileVersion, IWriteable from allmydata.util import hashutil, log, consumer, deferredutil, mathutil from allmydata.util.assertutil import precondition from allmydata.uri import WriteableSSKFileURI, ReadonlySSKFileURI, \ WriteableMDMFFileURI, ReadonlyMDMFFileURI from allmydata.monitor import Monitor from pycryptopp.cipher.aes import AES from allmydata.mutable.publish import Publish, MutableData,\ TransformingUploadable from allmydata.mutable.common import MODE_READ, MODE_WRITE, MODE_CHECK, UnrecoverableFileError, \ UncoordinatedWriteError from allmydata.mutable.servermap import ServerMap, ServermapUpdater from allmydata.mutable.retrieve import Retrieve from allmydata.mutable.checker import MutableChecker, MutableCheckAndRepairer from allmydata.mutable.repairer import Repairer class BackoffAgent: # these parameters are copied from foolscap.reconnector, which gets them # from twisted.internet.protocol.ReconnectingClientFactory initialDelay = 1.0 factor = 2.7182818284590451 # (math.e) jitter = 0.11962656492 # molar Planck constant times c, Joule meter/mole maxRetries = 4 def __init__(self): self._delay = self.initialDelay self._count = 0 def delay(self, node, f): self._count += 1 if self._count == 4: return f self._delay = self._delay * self.factor self._delay = random.normalvariate(self._delay, self._delay * self.jitter) d = defer.Deferred() reactor.callLater(self._delay, d.callback, None) return d # use nodemaker.create_mutable_file() to make one of these class MutableFileNode: implements(IMutableFileNode, ICheckable) def __init__(self, storage_broker, secret_holder, default_encoding_parameters, history): self._storage_broker = storage_broker self._secret_holder = secret_holder self._default_encoding_parameters = default_encoding_parameters self._history = history self._pubkey = None # filled in upon first read self._privkey = None # filled in if we're mutable # we keep track of the last encoding parameters that we use. These # are updated upon retrieve, and used by publish. If we publish # without ever reading (i.e. overwrite()), then we use these values. self._required_shares = default_encoding_parameters["k"] self._total_shares = default_encoding_parameters["n"] self._sharemap = {} # known shares, shnum-to-[nodeids] self._most_recent_size = None # filled in after __init__ if we're being created for the first time; # filled in by the servermap updater before publishing, otherwise. # set to this default value in case neither of those things happen, # or in case the servermap can't find any shares to tell us what # to publish as. self._protocol_version = None # all users of this MutableFileNode go through the serializer. This # takes advantage of the fact that Deferreds discard the callbacks # that they're done with, so we can keep using the same Deferred # forever without consuming more and more memory. self._serializer = defer.succeed(None) # Starting with MDMF, we can get these from caps if they're # there. Leave them alone for now; they'll be filled in by my # init_from_cap method if necessary. 
self._downloader_hints = {} def __repr__(self): if hasattr(self, '_uri'): return "<%s %x %s %s>" % (self.__class__.__name__, id(self), self.is_readonly() and 'RO' or 'RW', self._uri.abbrev()) else: return "<%s %x %s %s>" % (self.__class__.__name__, id(self), None, None) def init_from_cap(self, filecap): # we have the URI, but we have not yet retrieved the public # verification key, nor things like 'k' or 'N'. If and when someone # wants to get our contents, we'll pull from shares and fill those # in. if isinstance(filecap, (WriteableMDMFFileURI, ReadonlyMDMFFileURI)): self._protocol_version = MDMF_VERSION elif isinstance(filecap, (ReadonlySSKFileURI, WriteableSSKFileURI)): self._protocol_version = SDMF_VERSION self._uri = filecap self._writekey = None if not filecap.is_readonly() and filecap.is_mutable(): self._writekey = self._uri.writekey self._readkey = self._uri.readkey self._storage_index = self._uri.storage_index self._fingerprint = self._uri.fingerprint # the following values are learned during Retrieval # self._pubkey # self._required_shares # self._total_shares # and these are needed for Publish. They are filled in by Retrieval # if possible, otherwise by the first peer that Publish talks to. self._privkey = None self._encprivkey = None return self def create_with_keys(self, (pubkey, privkey), contents, version=SDMF_VERSION): """Call this to create a brand-new mutable file. It will create the shares, find homes for them, and upload the initial contents (created with the same rules as IClient.create_mutable_file() ). Returns a Deferred that fires (with the MutableFileNode instance you should use) when it completes. """ self._pubkey, self._privkey = pubkey, privkey pubkey_s = self._pubkey.serialize() privkey_s = self._privkey.serialize() self._writekey = hashutil.ssk_writekey_hash(privkey_s) self._encprivkey = self._encrypt_privkey(self._writekey, privkey_s) self._fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey_s) if version == MDMF_VERSION: self._uri = WriteableMDMFFileURI(self._writekey, self._fingerprint) self._protocol_version = version elif version == SDMF_VERSION: self._uri = WriteableSSKFileURI(self._writekey, self._fingerprint) self._protocol_version = version self._readkey = self._uri.readkey self._storage_index = self._uri.storage_index initial_contents = self._get_initial_contents(contents) return self._upload(initial_contents, None) def _get_initial_contents(self, contents): if contents is None: return MutableData("") if isinstance(contents, str): return MutableData(contents) if IMutableUploadable.providedBy(contents): return contents assert callable(contents), "%s should be callable, not %s" % \ (contents, type(contents)) return contents(self) def _encrypt_privkey(self, writekey, privkey): enc = AES(writekey) crypttext = enc.process(privkey) return crypttext def _decrypt_privkey(self, enc_privkey): enc = AES(self._writekey) privkey = enc.process(enc_privkey) return privkey def _populate_pubkey(self, pubkey): self._pubkey = pubkey def _populate_required_shares(self, required_shares): self._required_shares = required_shares def _populate_total_shares(self, total_shares): self._total_shares = total_shares def _populate_privkey(self, privkey): self._privkey = privkey def _populate_encprivkey(self, encprivkey): self._encprivkey = encprivkey def get_write_enabler(self, server): seed = server.get_foolscap_write_enabler_seed() assert len(seed) == 20 return hashutil.ssk_write_enabler_hash(self._writekey, seed) def get_renewal_secret(self, server): crs = 
self._secret_holder.get_renewal_secret() frs = hashutil.file_renewal_secret_hash(crs, self._storage_index) lease_seed = server.get_lease_seed() assert len(lease_seed) == 20 return hashutil.bucket_renewal_secret_hash(frs, lease_seed) def get_cancel_secret(self, server): ccs = self._secret_holder.get_cancel_secret() fcs = hashutil.file_cancel_secret_hash(ccs, self._storage_index) lease_seed = server.get_lease_seed() assert len(lease_seed) == 20 return hashutil.bucket_cancel_secret_hash(fcs, lease_seed) def get_writekey(self): return self._writekey def get_readkey(self): return self._readkey def get_storage_index(self): return self._storage_index def get_fingerprint(self): return self._fingerprint def get_privkey(self): return self._privkey def get_encprivkey(self): return self._encprivkey def get_pubkey(self): return self._pubkey def get_required_shares(self): return self._required_shares def get_total_shares(self): return self._total_shares #################################### # IFilesystemNode def get_size(self): return self._most_recent_size def get_current_size(self): d = self.get_size_of_best_version() d.addCallback(self._stash_size) return d def _stash_size(self, size): self._most_recent_size = size return size def get_cap(self): return self._uri def get_readcap(self): return self._uri.get_readonly() def get_verify_cap(self): return self._uri.get_verify_cap() def get_repair_cap(self): if self._uri.is_readonly(): return None return self._uri def get_uri(self): return self._uri.to_string() def get_write_uri(self): if self.is_readonly(): return None return self._uri.to_string() def get_readonly_uri(self): return self._uri.get_readonly().to_string() def get_readonly(self): if self.is_readonly(): return self ro = MutableFileNode(self._storage_broker, self._secret_holder, self._default_encoding_parameters, self._history) ro.init_from_cap(self._uri.get_readonly()) return ro def is_mutable(self): return self._uri.is_mutable() def is_readonly(self): return self._uri.is_readonly() def is_unknown(self): return False def is_allowed_in_immutable_directory(self): return not self._uri.is_mutable() def raise_error(self): pass def __hash__(self): return hash((self.__class__, self._uri)) def __cmp__(self, them): if cmp(type(self), type(them)): return cmp(type(self), type(them)) if cmp(self.__class__, them.__class__): return cmp(self.__class__, them.__class__) return cmp(self._uri, them._uri) ################################# # ICheckable def check(self, monitor, verify=False, add_lease=False): checker = MutableChecker(self, self._storage_broker, self._history, monitor) return checker.check(verify, add_lease) def check_and_repair(self, monitor, verify=False, add_lease=False): checker = MutableCheckAndRepairer(self, self._storage_broker, self._history, monitor) return checker.check(verify, add_lease) ################################# # IRepairable def repair(self, check_results, force=False, monitor=None): assert ICheckResults(check_results) r = Repairer(self, check_results, self._storage_broker, self._history, monitor) d = r.start(force) return d ################################# # IFileNode def get_best_readable_version(self): """ I return a Deferred that fires with a MutableFileVersion representing the best readable version of the file that I represent """ return self.get_readable_version() def get_readable_version(self, servermap=None, version=None): """ I return a Deferred that fires with an MutableFileVersion for my version argument, if there is a recoverable file of that version on the grid. 
If there is no recoverable version, I fire with an UnrecoverableFileError. If a servermap is provided, I look in there for the requested version. If no servermap is provided, I create and update a new one. If no version is provided, then I return a MutableFileVersion representing the best recoverable version of the file. """ d = self._get_version_from_servermap(MODE_READ, servermap, version) def _build_version((servermap, their_version)): assert their_version in servermap.recoverable_versions() assert their_version in servermap.make_versionmap() mfv = MutableFileVersion(self, servermap, their_version, self._storage_index, self._storage_broker, self._readkey, history=self._history) assert mfv.is_readonly() mfv.set_downloader_hints(self._downloader_hints) # our caller can use this to download the contents of the # mutable file. return mfv return d.addCallback(_build_version) def _get_version_from_servermap(self, mode, servermap=None, version=None): """ I return a Deferred that fires with (servermap, version). This function performs validation and a servermap update. If it returns (servermap, version), the caller can assume that: - servermap was last updated in mode. - version is recoverable, and corresponds to the servermap. If version and servermap are provided to me, I will validate that version exists in the servermap, and that the servermap was updated correctly. If version is not provided, but servermap is, I will validate the servermap and return the best recoverable version that I can find in the servermap. If the version is provided but the servermap isn't, I will obtain a servermap that has been updated in the correct mode and validate that version is found and recoverable. If neither servermap nor version are provided, I will obtain a servermap updated in the correct mode, and return the best recoverable version that I can find in there. """ # XXX: wording ^^^^ if servermap and servermap.get_last_update()[0] == mode: d = defer.succeed(servermap) else: d = self._get_servermap(mode) def _get_version(servermap, v): if v and v not in servermap.recoverable_versions(): v = None elif not v: v = servermap.best_recoverable_version() if not v: raise UnrecoverableFileError("no recoverable versions") return (servermap, v) return d.addCallback(_get_version, version) def download_best_version(self, progress=None): """ I return a Deferred that fires with the contents of the best version of this mutable file. """ return self._do_serialized(self._download_best_version, progress=progress) def _download_best_version(self, progress=None): """ I am the serialized sibling of download_best_version. """ d = self.get_best_readable_version() d.addCallback(self._record_size) d.addCallback(lambda version: version.download_to_data(progress=progress)) # It is possible that the download will fail because there # aren't enough shares to be had. If so, we will try again after # updating the servermap in MODE_WRITE, which may find more # shares than updating in MODE_READ, as we just did. We can do # this by getting the best mutable version and downloading from # that -- the best mutable version will be a MutableFileVersion # with a servermap that was last updated in MODE_WRITE, as we # want. If this fails, then we give up. 
def _maybe_retry(failure): failure.trap(NotEnoughSharesError) d = self.get_best_mutable_version() d.addCallback(self._record_size) d.addCallback(lambda version: version.download_to_data(progress=progress)) return d d.addErrback(_maybe_retry) return d def _record_size(self, mfv): """ I record the size of a mutable file version. """ self._most_recent_size = mfv.get_size() return mfv def get_size_of_best_version(self): """ I return the size of the best version of this mutable file. This is equivalent to calling get_size() on the result of get_best_readable_version(). """ d = self.get_best_readable_version() return d.addCallback(lambda mfv: mfv.get_size()) ################################# # IMutableFileNode def get_best_mutable_version(self, servermap=None): """ I return a Deferred that fires with a MutableFileVersion representing the best readable version of the file that I represent. I am like get_best_readable_version, except that I will try to make a writeable version if I can. """ return self.get_mutable_version(servermap=servermap) def get_mutable_version(self, servermap=None, version=None): """ I return a version of this mutable file. I return a Deferred that fires with a MutableFileVersion If version is provided, the Deferred will fire with a MutableFileVersion initailized with that version. Otherwise, it will fire with the best version that I can recover. If servermap is provided, I will use that to find versions instead of performing my own servermap update. """ if self.is_readonly(): return self.get_readable_version(servermap=servermap, version=version) # get_mutable_version => write intent, so we require that the # servermap is updated in MODE_WRITE d = self._get_version_from_servermap(MODE_WRITE, servermap, version) def _build_version((servermap, smap_version)): # these should have been set by the servermap update. assert self._secret_holder assert self._writekey mfv = MutableFileVersion(self, servermap, smap_version, self._storage_index, self._storage_broker, self._readkey, self._writekey, self._secret_holder, history=self._history) assert not mfv.is_readonly() mfv.set_downloader_hints(self._downloader_hints) return mfv return d.addCallback(_build_version) # XXX: I'm uncomfortable with the difference between upload and # overwrite, which, FWICT, is basically that you don't have to # do a servermap update before you overwrite. We split them up # that way anyway, so I guess there's no real difficulty in # offering both ways to callers, but it also makes the # public-facing API cluttery, and makes it hard to discern the # right way of doing things. # In general, we leave it to callers to ensure that they aren't # going to cause UncoordinatedWriteErrors when working with # MutableFileVersions. We know that the next three operations # (upload, overwrite, and modify) will all operate on the same # version, so we say that only one of them can be going on at once, # and serialize them to ensure that that actually happens, since as # the caller in this situation it is our job to do that. def overwrite(self, new_contents): """ I overwrite the contents of the best recoverable version of this mutable file with new_contents. This is equivalent to calling overwrite on the result of get_best_mutable_version with new_contents as an argument. I return a Deferred that eventually fires with the results of my replacement process. """ # TODO: Update downloader hints. 
return self._do_serialized(self._overwrite, new_contents) def _overwrite(self, new_contents): """ I am the serialized sibling of overwrite. """ d = self.get_best_mutable_version() d.addCallback(lambda mfv: mfv.overwrite(new_contents)) d.addCallback(self._did_upload, new_contents.get_size()) return d def upload(self, new_contents, servermap): """ I overwrite the contents of the best recoverable version of this mutable file with new_contents, using servermap instead of creating/updating our own servermap. I return a Deferred that fires with the results of my upload. """ # TODO: Update downloader hints return self._do_serialized(self._upload, new_contents, servermap) def modify(self, modifier, backoffer=None): """ I modify the contents of the best recoverable version of this mutable file with the modifier. This is equivalent to calling modify on the result of get_best_mutable_version. I return a Deferred that eventually fires with an UploadResults instance describing this process. """ # TODO: Update downloader hints. return self._do_serialized(self._modify, modifier, backoffer) def _modify(self, modifier, backoffer): """ I am the serialized sibling of modify. """ d = self.get_best_mutable_version() d.addCallback(lambda mfv: mfv.modify(modifier, backoffer)) return d def download_version(self, servermap, version, fetch_privkey=False): """ Download the specified version of this mutable file. I return a Deferred that fires with the contents of the specified version as a bytestring, or errbacks if the file is not recoverable. """ d = self.get_readable_version(servermap, version) return d.addCallback(lambda mfv: mfv.download_to_data(fetch_privkey)) def get_servermap(self, mode): """ I return a servermap that has been updated in mode. mode should be one of MODE_READ, MODE_WRITE, MODE_CHECK or MODE_ANYTHING. See servermap.py for more on what these mean. """ return self._do_serialized(self._get_servermap, mode) def _get_servermap(self, mode): """ I am a serialized twin to get_servermap. """ servermap = ServerMap() d = self._update_servermap(servermap, mode) # The servermap will tell us about the most recent size of the # file, so we may as well set that so that callers might get # more data about us. if not self._most_recent_size: d.addCallback(self._get_size_from_servermap) return d def _get_size_from_servermap(self, servermap): """ I extract the size of the best version of this file and record it in self._most_recent_size. I return the servermap that I was given. """ if servermap.recoverable_versions(): v = servermap.best_recoverable_version() size = v[4] # verinfo[4] == size self._most_recent_size = size return servermap def _update_servermap(self, servermap, mode): u = ServermapUpdater(self, self._storage_broker, Monitor(), servermap, mode) if self._history: self._history.notify_mapupdate(u.get_status()) return u.update() #def set_version(self, version): # I can be set in two ways: # 1. When the node is created. # 2. (for an existing share) when the Servermap is updated # before I am read. # assert version in (MDMF_VERSION, SDMF_VERSION) # self._protocol_version = version def get_version(self): return self._protocol_version def _do_serialized(self, cb, *args, **kwargs): # note: to avoid deadlock, this callable is *not* allowed to invoke # other serialized methods within this (or any other) # MutableFileNode. The callable should be a bound method of this same # MFN instance. 
d = defer.Deferred() self._serializer.addCallback(lambda ignore: cb(*args, **kwargs)) # we need to put off d.callback until this Deferred is finished being # processed. Otherwise the caller's subsequent activities (like, # doing other things with this node) can cause reentrancy problems in # the Deferred code itself self._serializer.addBoth(lambda res: eventually(d.callback, res)) # add a log.err just in case something really weird happens, because # self._serializer stays around forever, therefore we won't see the # usual Unhandled Error in Deferred that would give us a hint. self._serializer.addErrback(log.err) return d def _upload(self, new_contents, servermap): """ A MutableFileNode still has to have some way of getting published initially, which is what I am here for. After that, all publishing, updating, modifying and so on happens through MutableFileVersions. """ assert self._pubkey, "update_servermap must be called before publish" # Define IPublishInvoker with a set_downloader_hints method? # Then have the publisher call that method when it's done publishing? p = Publish(self, self._storage_broker, servermap) if self._history: self._history.notify_publish(p.get_status(), new_contents.get_size()) d = p.publish(new_contents) d.addCallback(self._did_upload, new_contents.get_size()) return d def set_downloader_hints(self, hints): self._downloader_hints = hints def _did_upload(self, res, size): self._most_recent_size = size return res class MutableFileVersion: """ I represent a specific version (most likely the best version) of a mutable file. Since I implement IReadable, instances which hold a reference to an instance of me are guaranteed the ability (absent connection difficulties or unrecoverable versions) to read the file that I represent. Depending on whether I was initialized with a write capability or not, I may also provide callers the ability to overwrite or modify the contents of the mutable file that I reference. """ implements(IMutableFileVersion, IWriteable) def __init__(self, node, servermap, version, storage_index, storage_broker, readcap, writekey=None, write_secrets=None, history=None): self._node = node self._servermap = servermap self._version = version self._storage_index = storage_index self._write_secrets = write_secrets self._history = history self._storage_broker = storage_broker #assert isinstance(readcap, IURI) self._readcap = readcap self._writekey = writekey self._serializer = defer.succeed(None) def get_sequence_number(self): """ Get the sequence number of the mutable version that I represent. """ return self._version[0] # verinfo[0] == the sequence number # TODO: Terminology? def get_writekey(self): """ I return a writekey or None if I don't have a writekey. """ return self._writekey def set_downloader_hints(self, hints): """ I set the downloader hints. """ assert isinstance(hints, dict) self._downloader_hints = hints def get_downloader_hints(self): """ I return the downloader hints. """ return self._downloader_hints def overwrite(self, new_contents): """ I overwrite the contents of this mutable file version with the data in new_contents. """ assert not self.is_readonly() return self._do_serialized(self._overwrite, new_contents) def _overwrite(self, new_contents): assert IMutableUploadable.providedBy(new_contents) assert self._servermap.get_last_update()[0] == MODE_WRITE return self._upload(new_contents) def modify(self, modifier, backoffer=None): """I use a modifier callback to apply a change to the mutable file. 
I implement the following pseudocode:: obtain_mutable_filenode_lock() first_time = True while True: update_servermap(MODE_WRITE) old = retrieve_best_version() new = modifier(old, servermap, first_time) first_time = False if new == old: break try: publish(new) except UncoordinatedWriteError, e: backoffer(e) continue break release_mutable_filenode_lock() The idea is that your modifier function can apply a delta of some sort, and it will be re-run as necessary until it succeeds. The modifier must inspect the old version to see whether its delta has already been applied: if so it should return the contents unmodified. Note that the modifier is required to run synchronously, and must not invoke any methods on this MutableFileNode instance. The backoff-er is a callable that is responsible for inserting a random delay between subsequent attempts, to help competing updates from colliding forever. It is also allowed to give up after a while. The backoffer is given two arguments: this MutableFileNode, and the Failure object that contains the UncoordinatedWriteError. It should return a Deferred that will fire when the next attempt should be made, or return the Failure if the loop should give up. If backoffer=None, a default one is provided which will perform exponential backoff, and give up after 4 tries. Note that the backoffer should not invoke any methods on this MutableFileNode instance, and it needs to be highly conscious of deadlock issues. """ assert not self.is_readonly() return self._do_serialized(self._modify, modifier, backoffer) def _modify(self, modifier, backoffer): if backoffer is None: backoffer = BackoffAgent().delay return self._modify_and_retry(modifier, backoffer, True) def _modify_and_retry(self, modifier, backoffer, first_time): """ I try to apply modifier to the contents of this version of the mutable file. If I succeed, I return an UploadResults instance describing my success. If I fail, I try again after waiting for a little bit. """ log.msg("doing modify") if first_time: d = self._update_servermap() else: # We ran into trouble; do MODE_CHECK so we're a little more # careful on subsequent tries. d = self._update_servermap(mode=MODE_CHECK) d.addCallback(lambda ignored: self._modify_once(modifier, first_time)) def _retry(f): f.trap(UncoordinatedWriteError) # Uh oh, it broke. We're allowed to trust the servermap for our # first try, but after that we need to update it. It's # possible that we've failed due to a race with another # uploader, and if the race is to converge correctly, we # need to know about that upload. d2 = defer.maybeDeferred(backoffer, self, f) d2.addCallback(lambda ignored: self._modify_and_retry(modifier, backoffer, False)) return d2 d.addErrback(_retry) return d def _modify_once(self, modifier, first_time): """ I attempt to apply a modifier to the contents of the mutable file. """ assert self._servermap.get_last_update()[0] != MODE_READ # download_to_data is serialized, so we have to call this to # avoid deadlock. d = self._try_to_download_data() def _apply(old_contents): new_contents = modifier(old_contents, self._servermap, first_time) precondition((isinstance(new_contents, str) or new_contents is None), "Modifier function must return a string " "or None") if new_contents is None or new_contents == old_contents: log.msg("no changes") # no changes need to be made if first_time: return # However, since Publish is not automatically doing a # recovery when it observes UCWE, we need to do a second # publish. See #551 for details. 
We'll basically loop until # we managed an uncontested publish. old_uploadable = MutableData(old_contents) new_contents = old_uploadable else: new_contents = MutableData(new_contents) return self._upload(new_contents) d.addCallback(_apply) return d def is_readonly(self): """ I return True if this MutableFileVersion provides no write access to the file that it encapsulates, and False if it provides the ability to modify the file. """ return self._writekey is None def is_mutable(self): """ I return True, since mutable files are always mutable by somebody. """ return True def get_storage_index(self): """ I return the storage index of the reference that I encapsulate. """ return self._storage_index def get_size(self): """ I return the length, in bytes, of this readable object. """ return self._servermap.size_of_version(self._version) def download_to_data(self, fetch_privkey=False, progress=None): """ I return a Deferred that fires with the contents of this readable object as a byte string. """ c = consumer.MemoryConsumer(progress=progress) d = self.read(c, fetch_privkey=fetch_privkey) d.addCallback(lambda mc: "".join(mc.chunks)) return d def _try_to_download_data(self): """ I am an unserialized cousin of download_to_data; I am called from the children of modify() to download the data associated with this mutable version. """ c = consumer.MemoryConsumer() # modify will almost certainly write, so we need the privkey. d = self._read(c, fetch_privkey=True) d.addCallback(lambda mc: "".join(mc.chunks)) return d def read(self, consumer, offset=0, size=None, fetch_privkey=False): """ I read a portion (possibly all) of the mutable file that I reference into consumer. """ return self._do_serialized(self._read, consumer, offset, size, fetch_privkey) def _read(self, consumer, offset=0, size=None, fetch_privkey=False): """ I am the serialized companion of read. """ r = Retrieve(self._node, self._storage_broker, self._servermap, self._version, fetch_privkey) if self._history: self._history.notify_retrieve(r.get_status()) d = r.download(consumer, offset, size) return d def _do_serialized(self, cb, *args, **kwargs): # note: to avoid deadlock, this callable is *not* allowed to invoke # other serialized methods within this (or any other) # MutableFileNode. The callable should be a bound method of this same # MFN instance. d = defer.Deferred() self._serializer.addCallback(lambda ignore: cb(*args, **kwargs)) # we need to put off d.callback until this Deferred is finished being # processed. Otherwise the caller's subsequent activities (like, # doing other things with this node) can cause reentrancy problems in # the Deferred code itself self._serializer.addBoth(lambda res: eventually(d.callback, res)) # add a log.err just in case something really weird happens, because # self._serializer stays around forever, therefore we won't see the # usual Unhandled Error in Deferred that would give us a hint. self._serializer.addErrback(log.err) return d def _upload(self, new_contents): #assert self._pubkey, "update_servermap must be called before publish" p = Publish(self._node, self._storage_broker, self._servermap) if self._history: self._history.notify_publish(p.get_status(), new_contents.get_size()) d = p.publish(new_contents) d.addCallback(self._did_upload, new_contents.get_size()) return d def _did_upload(self, res, size): self._most_recent_size = size return res def update(self, data, offset): """ Do an update of this mutable file version by inserting data at offset within the file. 
If offset is the EOF, this is an append operation. I return a Deferred that fires with the results of the update operation when it has completed. In cases where update does not append any data, or where it does not append so many blocks that the block count crosses a power-of-two boundary, this operation will use roughly O(data.get_size()) memory/bandwidth/CPU to perform the update. Otherwise, it must download, re-encode, and upload the entire file again, which will use O(filesize) resources. """ return self._do_serialized(self._update, data, offset) def _update(self, data, offset): """ I update the mutable file version represented by this particular IMutableVersion by inserting the data in data at the offset offset. I return a Deferred that fires when this has been completed. """ new_size = data.get_size() + offset old_size = self.get_size() segment_size = self._version[3] num_old_segments = mathutil.div_ceil(old_size, segment_size) num_new_segments = mathutil.div_ceil(new_size, segment_size) log.msg("got %d old segments, %d new segments" % \ (num_old_segments, num_new_segments)) # We do a whole file re-encode if the file is an SDMF file. if self._version[2]: # version[2] == SDMF salt, which MDMF lacks log.msg("doing re-encode instead of in-place update") return self._do_modify_update(data, offset) # Otherwise, we can replace just the parts that are changing. log.msg("updating in place") d = self._do_update_update(data, offset) d.addCallback(self._decode_and_decrypt_segments, data, offset) d.addCallback(self._build_uploadable_and_finish, data, offset) return d def _do_modify_update(self, data, offset): """ I perform a file update by modifying the contents of the file after downloading it, then reuploading it. I am less efficient than _do_update_update, but am necessary for certain updates. """ def m(old, servermap, first_time): start = offset rest = offset + data.get_size() new = old[:start] new += "".join(data.read(data.get_size())) new += old[rest:] return new return self._modify(m, None) def _do_update_update(self, data, offset): """ I start the Servermap update that gets us the data we need to continue the update process. I return a Deferred that fires when the servermap update is done. """ assert IMutableUploadable.providedBy(data) assert self.is_mutable() # offset == self.get_size() is valid and means that we are # appending data to the file. assert offset <= self.get_size() segsize = self._version[3] # We'll need the segment that the data starts in, regardless of # what we'll do later. start_segment = offset // segsize # We only need the end segment if the data we append does not go # beyond the current end-of-file. end_segment = start_segment if offset + data.get_size() < self.get_size(): end_data = offset + data.get_size() # The last byte we touch is the end_data'th byte, which is actually # byte end_data - 1 because bytes are zero-indexed. end_data -= 1 end_segment = end_data // segsize self._start_segment = start_segment self._end_segment = end_segment # Now ask for the servermap to be updated in MODE_WRITE with # this update range. return self._update_servermap(update_range=(start_segment, end_segment)) def _decode_and_decrypt_segments(self, ignored, data, offset): """ After the servermap update, I take the encrypted and encoded data that the servermap fetched while doing its update and transform it into decoded-and-decrypted plaintext that can be used by the new uploadable. I return a Deferred that fires with the segments. 
""" r = Retrieve(self._node, self._storage_broker, self._servermap, self._version) # decode: takes in our blocks and salts from the servermap, # returns a Deferred that fires with the corresponding plaintext # segments. Does not download -- simply takes advantage of # existing infrastructure within the Retrieve class to avoid # duplicating code. sm = self._servermap # XXX: If the methods in the servermap don't work as # abstractions, you should rewrite them instead of going around # them. update_data = sm.update_data start_segments = {} # shnum -> start segment end_segments = {} # shnum -> end segment blockhashes = {} # shnum -> blockhash tree for (shnum, original_data) in update_data.iteritems(): data = [d[1] for d in original_data if d[0] == self._version] # data is [(blockhashes,start,end)..] # Every data entry in our list should now be share shnum for # a particular version of the mutable file, so all of the # entries should be identical. datum = data[0] assert [x for x in data if x != datum] == [] # datum is (blockhashes,start,end) blockhashes[shnum] = datum[0] start_segments[shnum] = datum[1] # (block,salt) bytestrings end_segments[shnum] = datum[2] d1 = r.decode(start_segments, self._start_segment) d2 = r.decode(end_segments, self._end_segment) d3 = defer.succeed(blockhashes) return deferredutil.gatherResults([d1, d2, d3]) def _build_uploadable_and_finish(self, segments_and_bht, data, offset): """ After the process has the plaintext segments, I build the TransformingUploadable that the publisher will eventually re-upload to the grid. I then invoke the publisher with that uploadable, and return a Deferred when the publish operation has completed without issue. """ u = TransformingUploadable(data, offset, self._version[3], segments_and_bht[0], segments_and_bht[1]) p = Publish(self._node, self._storage_broker, self._servermap) return p.update(u, offset, segments_and_bht[2], self._version) def _update_servermap(self, mode=MODE_WRITE, update_range=None): """ I update the servermap. I return a Deferred that fires when the servermap update is done. """ if update_range: u = ServermapUpdater(self._node, self._storage_broker, Monitor(), self._servermap, mode=mode, update_range=update_range) else: u = ServermapUpdater(self._node, self._storage_broker, Monitor(), self._servermap, mode=mode) return u.update()
gpl-2.0
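filenode.py above funnels every operation through one long-lived Deferred (self._serializer) so that at most one runs at a time. A stripped-down sketch of that idiom (my own minimal version; the real code additionally routes results through foolscap's eventually() to avoid reentrancy and attaches log.err to the chain):

from twisted.internet import defer

class Serialized(object):
    """Sketch: run methods one at a time by chaining them on a single Deferred."""
    def __init__(self):
        # Deferreds drop callbacks once fired, so this chain never grows.
        self._serializer = defer.succeed(None)

    def do_serialized(self, cb, *args, **kwargs):
        d = defer.Deferred()
        self._serializer.addCallback(lambda ignore: cb(*args, **kwargs))
        # hand each result (or Failure) to the per-call Deferred
        self._serializer.addBoth(d.callback)
        return d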
shinyChen/browserscope
test/test_util.py
9
6660
#!/usr/bin/python2.5 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the 'License') # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Shared Models Unit Tests.""" __author__ = '[email protected] (Lindsey Simon)' import unittest import random import logging from google.appengine.ext import db from google.appengine.api import memcache from django.test.client import Client from base import util from categories import all_test_sets from categories import test_set_params from models import result from models.user_agent import UserAgent import mock_data import settings from categories import richtext class TestHome(unittest.TestCase): def setUp(self): self.client = Client() def testHome(self): response = self.client.get('/', {}, **mock_data.UNIT_TEST_UA) self.assertEqual(200, response.status_code) #def testHomeWithResults(self): #test_set = mock_data.MockTestSet('cat_home') #params = {'cat_home_results': 'apple=0,banana=97,coconut=677'} #response = self.client.get('/', params, **mock_data.UNIT_TEST_UA) #self.assertEqual(200, response.status_code) class TestBeacon(unittest.TestCase): def setUp(self): self.test_set = mock_data.MockTestSet() all_test_sets.AddTestSet(self.test_set) self.client = Client() def tearDown(self): all_test_sets.RemoveTestSet(self.test_set) def testBeaconWithoutCsrfToken(self): params = {} response = self.client.get('/beacon', params, **mock_data.UNIT_TEST_UA) self.assertEqual(403, response.status_code) def testBeaconWithoutCategory(self): csrf_token = self.client.get('/get_csrf').content params = {'results': 'testDisply:200', 'csrf_token': csrf_token} response = self.client.get('/beacon', params, **mock_data.UNIT_TEST_UA) self.assertEqual(util.BAD_BEACON_MSG + 'Category/Results', response.content) def testBeacon(self): csrf_token = self.client.get('/get_csrf').content params = { 'category': self.test_set.category, 'results': 'apple=1,banana=2,coconut=4', 'csrf_token': csrf_token } response = self.client.get('/beacon', params, **mock_data.UNIT_TEST_UA) self.assertEqual(204, response.status_code) # Did a ResultParent get created? 
query = db.Query(result.ResultParent) query.filter('category =', self.test_set.category) result_parent = query.get() self.assertNotEqual(result_parent, None) result_times = result_parent.GetResultTimes() self.assertEqual( [('apple', 1, False), ('banana', 2, False), ('coconut', 4, False)], sorted((x.test, x.score, x.dirty) for x in result_times)) def testBeaconWithChromeFrame(self): csrf_token = self.client.get('/get_csrf').content chrome_ua_string = ('Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) ' 'AppleWebKit/530.1 (KHTML, like Gecko) Chrome/4.0.169.1 Safari/530.1') chrome_frame_ua_string = ('Mozilla/4.0 ' '(compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; ' 'chromeframe; ' '.NET CLR 2.0.50727; .NET CLR 1.1.4322; ' '.NET CLR 3.0.04506.648; .NET CLR 3.5.21022)') unit_test_ua = mock_data.UNIT_TEST_UA unit_test_ua['HTTP_USER_AGENT'] = chrome_frame_ua_string params = { 'category': self.test_set.category, 'results': 'apple=0,banana=0,coconut=1000', 'csrf_token': csrf_token, 'js_ua': chrome_ua_string } response = self.client.get('/beacon', params, **unit_test_ua) self.assertEqual(204, response.status_code) # Did a ResultParent get created? query = db.Query(result.ResultParent) query.filter('category =', self.test_set.category) result_parent = query.get() self.assertNotEqual(result_parent, None) # What UA did the ResultParent get tied to? Chrome Frame (IE 7) I hope. user_agent = result_parent.user_agent self.assertEqual('Chrome Frame (IE 7) 4.0.169', user_agent.pretty()) # Were ResultTimes created? result_times = result_parent.GetResultTimes() self.assertEqual( [('apple', 0, False), ('banana', 0, False), ('coconut', 1000, False)], sorted((x.test, x.score, x.dirty) for x in result_times)) def testBeaconWithBogusTests(self): csrf_token = self.client.get('/get_csrf').content params = { 'category': self.test_set.category, 'results': 'testBogus=1,testVisibility=2', 'csrf_token': csrf_token } response = self.client.get('/beacon', params, **mock_data.UNIT_TEST_UA) self.assertEqual(util.BAD_BEACON_MSG + 'ResultParent', response.content) # Did a ResultParent get created? Shouldn't have. query = db.Query(result.ResultParent) query.filter('category =', self.test_set.category) result_parent = query.get() self.assertEqual(None, result_parent) def testBeaconWithoutTestSet(self): category = 'test_beacon_wo_test_set' csrf_token = self.client.get('/get_csrf').content params = { 'category': category, 'results': 'testDisplay=1,testVisibility=2', 'csrf_token': csrf_token } response = self.client.get('/beacon', params, **mock_data.UNIT_TEST_UA) self.assertEqual(util.BAD_BEACON_MSG + 'TestSet', response.content) class TestUtilFunctions(unittest.TestCase): def testCheckThrottleIpAddress(self): ip = mock_data.UNIT_TEST_UA['REMOTE_ADDR'] ua_string = mock_data.UNIT_TEST_UA['HTTP_USER_AGENT'] category = 'foo' for i in range(11): self.assertTrue(util.CheckThrottleIpAddress(ip, ua_string, category)) # The next one should bomb. self.assertFalse(util.CheckThrottleIpAddress(ip, ua_string, category)) # But a new category should work fine. 
self.assertTrue(util.CheckThrottleIpAddress(ip, ua_string, 'bar')) class TestClearMemcache(unittest.TestCase): def setUp(self): self.client = Client() def testClearMemcacheRecentTests(self): memcache.set(util.RECENT_TESTS_MEMCACHE_KEY, 'foo') params = {'recent': 1} response = self.client.get('/clear_memcache', params) recent_tests = memcache.get(util.RECENT_TESTS_MEMCACHE_KEY) self.assertEqual(None, recent_tests) self.assertEqual(200, response.status_code) if __name__ == '__main__': unittest.main()
apache-2.0
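testCheckThrottleIpAddress above expects 11 hits per (ip, category) to pass and the 12th to fail. A hypothetical memcache-backed counter in that spirit (illustrative only; this is not the actual util.CheckThrottleIpAddress implementation, and the key format and limits are made up):

from google.appengine.api import memcache

def check_throttle(ip, category, limit=11, window=3600):
    key = 'throttle:%s:%s' % (ip, category)
    count = memcache.incr(key, initial_value=0)  # atomic; creates the key at 0
    if count is None:
        return True  # memcache unavailable: fail open
    if count == 1:
        memcache.set(key, 1, time=window)  # start the rate window
    return count <= limit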
wolfram74/numerical_methods_iserles_notes
venv/lib/python2.7/site-packages/sympy/calculus/tests/test_finite_diff.py
18
7438
from sympy import S, symbols, Function from sympy.calculus.finite_diff import ( apply_finite_diff, finite_diff_weights, as_finite_diff ) def test_apply_finite_diff(): x, h = symbols('x h') f = Function('f') assert (apply_finite_diff(1, [x-h, x+h], [f(x-h), f(x+h)], x) - (f(x+h)-f(x-h))/(2*h)).simplify() == 0 assert (apply_finite_diff(1, [5, 6, 7], [f(5), f(6), f(7)], 5) - (-S(3)/2*f(5) + 2*f(6) - S(1)/2*f(7))).simplify() == 0 def test_finite_diff_weights(): d = finite_diff_weights(1, [5, 6, 7], 5) assert d[1][2] == [-S(3)/2, 2, -S(1)/2] # Table 1, p. 702 in doi:10.1090/S0025-5718-1988-0935077-0 # -------------------------------------------------------- # x = [[0], [-1, 0, 1], ...] xl = [[j for j in range(-i, i+1)] for i in range(0, 5)] # d holds all coefficients d = [finite_diff_weights({0: 0, 1: 2, 2: 4, 3: 4, 4: 4}[i], xl[i], 0) for i in range(5)] # Zeroeth derivative assert d[0][0][0] == [S(1)] # First derivative assert d[1][1][2] == [-S(1)/2, S(0), S(1)/2] assert d[2][1][4] == [S(1)/12, -S(2)/3, S(0), S(2)/3, -S(1)/12] assert d[3][1][6] == [-S(1)/60, S(3)/20, -S(3)/4, S(0), S(3)/4, -S(3)/20, S(1)/60] assert d[4][1][8] == [S(1)/280, -S(4)/105, S(1)/5, -S(4)/5, S(0), S(4)/5, -S(1)/5, S(4)/105, -S(1)/280] # Second derivative assert d[1][2][2] == [S(1), -S(2), S(1)] assert d[2][2][4] == [-S(1)/12, S(4)/3, -S(5)/2, S(4)/3, -S(1)/12] assert d[3][2][6] == [S(1)/90, -S(3)/20, S(3)/2, -S(49)/18, S(3)/2, -S(3)/20, S(1)/90] assert d[4][2][8] == [-S(1)/560, S(8)/315, -S(1)/5, S(8)/5, -S(205)/72, S(8)/5, -S(1)/5, S(8)/315, -S(1)/560] # Third derivative assert d[2][3][4] == [-S(1)/2, S(1), S(0), -S(1), S(1)/2] assert d[3][3][6] == [S(1)/8, -S(1), S(13)/8, S(0), -S(13)/8, S(1), -S(1)/8] assert d[4][3][8] == [-S(7)/240, S(3)/10, -S(169)/120, S(61)/30, S(0), -S(61)/30, S(169)/120, -S(3)/10, S(7)/240] # Fourth derivative assert d[2][4][4] == [S(1), -S(4), S(6), -S(4), S(1)] assert d[3][4][6] == [-S(1)/6, S(2), -S(13)/2, S(28)/3, -S(13)/2, S(2), -S(1)/6] assert d[4][4][8] == [S(7)/240, -S(2)/5, S(169)/60, -S(122)/15, S(91)/8, -S(122)/15, S(169)/60, -S(2)/5, S(7)/240] # Table 2, p. 703 in doi:10.1090/S0025-5718-1988-0935077-0 # -------------------------------------------------------- xl = [[j/S(2) for j in list(range(-i*2+1, 0, 2))+list(range(1, i*2+1, 2))] for i in range(1, 5)] # d holds all coefficients d = [finite_diff_weights({0: 1, 1: 2, 2: 4, 3: 4}[i], xl[i], 0) for i in range(4)] # Zeroth derivative assert d[0][0][1] == [S(1)/2, S(1)/2] assert d[1][0][3] == [-S(1)/16, S(9)/16, S(9)/16, -S(1)/16] assert d[2][0][5] == [S(3)/256, -S(25)/256, S(75)/128, S(75)/128, -S(25)/256, S(3)/256] assert d[3][0][7] == [-S(5)/2048, S(49)/2048, -S(245)/2048, S(1225)/2048, S(1225)/2048, -S(245)/2048, S(49)/2048, -S(5)/2048] # First derivative assert d[0][1][1] == [-S(1), S(1)] assert d[1][1][3] == [S(1)/24, -S(9)/8, S(9)/8, -S(1)/24] assert d[2][1][5] == [-S(3)/640, S(25)/384, -S(75)/64, S(75)/64, -S(25)/384, S(3)/640] assert d[3][1][7] == [S(5)/7168, -S(49)/5120, S(245)/3072, S(-1225)/1024, S(1225)/1024, -S(245)/3072, S(49)/5120, -S(5)/7168] # Reasonably the rest of the table is also correct... 
(testing of that # deemed excessive at the moment) def test_as_finite_diff(): x, h = symbols('x h') f = Function('f') # Central 1st derivative at gridpoint assert (as_finite_diff(f(x).diff(x), [x-2, x-1, x, x+1, x+2]) - (S(1)/12*(f(x-2)-f(x+2)) + S(2)/3*(f(x+1)-f(x-1)))).simplify() == 0 # Central 1st derivative "half-way" assert (as_finite_diff(f(x).diff(x)) - (f(x + S(1)/2)-f(x - S(1)/2))).simplify() == 0 assert (as_finite_diff(f(x).diff(x), h) - (f(x + h/S(2))-f(x - h/S(2)))/h).simplify() == 0 assert (as_finite_diff(f(x).diff(x), [x - 3*h, x-h, x+h, x + 3*h]) - (S(9)/(8*2*h)*(f(x+h) - f(x-h)) + S(1)/(24*2*h)*(f(x - 3*h) - f(x + 3*h)))).simplify() == 0 # One sided 1st derivative at gridpoint assert (as_finite_diff(f(x).diff(x), [0, 1, 2], 0) - (-S(3)/2*f(0) + 2*f(1) - f(2)/2)).simplify() == 0 assert (as_finite_diff(f(x).diff(x), [x, x+h], x) - (f(x+h) - f(x))/h).simplify() == 0 assert (as_finite_diff(f(x).diff(x), [x-h, x, x+h], x-h) - (-S(3)/(2*h)*f(x-h) + 2/h*f(x) - S(1)/(2*h)*f(x+h))).simplify() == 0 # One sided 1st derivative "half-way" assert (as_finite_diff(f(x).diff(x), [x-h, x+h, x + 3*h, x + 5*h, x + 7*h]) - 1/(2*h)*(-S(11)/(12)*f(x-h) + S(17)/(24)*f(x+h) + S(3)/8*f(x + 3*h) - S(5)/24*f(x + 5*h) + S(1)/24*f(x + 7*h))).simplify() == 0 # Central 2nd derivative at gridpoint assert (as_finite_diff(f(x).diff(x, 2), [x-h, x, x+h]) - h**-2 * (f(x-h) + f(x+h) - 2*f(x))).simplify() == 0 assert (as_finite_diff(f(x).diff(x, 2), [x - 2*h, x-h, x, x+h, x + 2*h]) - h**-2 * (-S(1)/12*(f(x - 2*h) + f(x + 2*h)) + S(4)/3*(f(x+h) + f(x-h)) - S(5)/2*f(x))).simplify() == 0 # Central 2nd derivative "half-way" assert (as_finite_diff(f(x).diff(x, 2), [x - 3*h, x-h, x+h, x + 3*h]) - (2*h)**-2 * (S(1)/2*(f(x - 3*h) + f(x + 3*h)) - S(1)/2*(f(x+h) + f(x-h)))).simplify() == 0 # One sided 2nd derivative at gridpoint assert (as_finite_diff(f(x).diff(x, 2), [x, x+h, x + 2*h, x + 3*h]) - h**-2 * (2*f(x) - 5*f(x+h) + 4*f(x+2*h) - f(x+3*h))).simplify() == 0 # One sided 2nd derivative at "half-way" assert (as_finite_diff(f(x).diff(x, 2), [x-h, x+h, x + 3*h, x + 5*h]) - (2*h)**-2 * (S(3)/2*f(x-h) - S(7)/2*f(x+h) + S(5)/2*f(x + 3*h) - S(1)/2*f(x + 5*h))).simplify() == 0 # Central 3rd derivative at gridpoint assert (as_finite_diff(f(x).diff(x, 3)) - (-f(x - 3/S(2)) + 3*f(x - 1/S(2)) - 3*f(x + 1/S(2)) + f(x + 3/S(2)))).simplify() == 0 assert (as_finite_diff( f(x).diff(x, 3), [x - 3*h, x - 2*h, x-h, x, x+h, x + 2*h, x + 3*h]) - h**-3 * (S(1)/8*(f(x - 3*h) - f(x + 3*h)) - f(x - 2*h) + f(x + 2*h) + S(13)/8*(f(x-h) - f(x+h)))).simplify() == 0 # Central 3rd derivative at "half-way" assert (as_finite_diff(f(x).diff(x, 3), [x - 3*h, x-h, x+h, x + 3*h]) - (2*h)**-3 * (f(x + 3*h)-f(x - 3*h) + 3*(f(x-h)-f(x+h)))).simplify() == 0 # One sided 3rd derivative at gridpoint assert (as_finite_diff(f(x).diff(x, 3), [x, x+h, x + 2*h, x + 3*h]) - h**-3 * (f(x + 3*h)-f(x) + 3*(f(x+h)-f(x + 2*h)))).simplify() == 0 # One sided 3rd derivative at "half-way" assert (as_finite_diff(f(x).diff(x, 3), [x-h, x+h, x + 3*h, x + 5*h]) - (2*h)**-3 * (f(x + 5*h)-f(x-h) + 3*(f(x+h)-f(x + 3*h)))).simplify() == 0
mit
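The tests above check finite_diff_weights against the coefficient tables in Fornberg's paper; a short usage sketch showing how the two functions fit together (printed forms noted up to simplification):

from sympy import symbols, Function
from sympy.calculus.finite_diff import apply_finite_diff, finite_diff_weights

x, h = symbols('x h')
f = Function('f')

# weights[m][j]: m-th derivative weights using the first j+1 stencil points
weights = finite_diff_weights(1, [x - h, x, x + h], x)
print(weights[1][2])  # [-1/(2*h), 0, 1/(2*h)], the central-difference weights

approx = apply_finite_diff(1, [x - h, x, x + h], [f(x - h), f(x), f(x + h)], x)
print(approx.simplify())  # equivalent to (f(x + h) - f(x - h))/(2*h)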
TamiaLab/carnetdumaker
apps/bugtracker/tests/test_context_processors.py
1
2920
""" Tests suite for the context processors of the bug tracker app. """ from django.test import SimpleTestCase from django.http import HttpRequest from ..context_processors import bugtracker from ..constants import (STATUS_OPEN, STATUS_NEED_DETAILS, STATUS_CONFIRMED, STATUS_WORKING_ON, STATUS_DEFERRED, STATUS_DUPLICATE, STATUS_WONT_FIX, STATUS_CLOSED, STATUS_FIXED) from ..constants import (PRIORITY_GODZILLA, PRIORITY_CRITICAL, PRIORITY_MAJOR, PRIORITY_MINOR, PRIORITY_TRIVIAL, PRIORITY_NEED_REVIEW, PRIORITY_FEATURE, PRIORITY_WISHLIST, PRIORITY_INVALID, PRIORITY_NOT_MY_FAULT) from ..constants import (DIFFICULTY_DESIGN_ERRORS, DIFFICULTY_IMPORTANT, DIFFICULTY_NORMAL, DIFFICULTY_LOW_IMPACT, DIFFICULTY_OPTIONAL) class BugTrackerContextProcessorTestCase(SimpleTestCase): """ Tests case for the context processor. """ def test_bugtracker_context_update(self): """ Test if the ``bugtracker`` context processor add the constants into the context. """ request = HttpRequest() result = bugtracker(request) self.assertEqual(result, { 'BUGTRACKER_STATUS': { 'OPEN': STATUS_OPEN, 'NEED_DETAILS': STATUS_NEED_DETAILS, 'CONFIRMED': STATUS_CONFIRMED, 'WORKING_ON': STATUS_WORKING_ON, 'DEFERRED': STATUS_DEFERRED, 'DUPLICATE': STATUS_DUPLICATE, 'WONT_FIX': STATUS_WONT_FIX, 'CLOSED': STATUS_CLOSED, 'FIXED': STATUS_FIXED, }, 'BUGTRACKER_PRIORITY': { 'GODZILLA': PRIORITY_GODZILLA, 'CRITICAL': PRIORITY_CRITICAL, 'MAJOR': PRIORITY_MAJOR, 'MINOR': PRIORITY_MINOR, 'TRIVIAL': PRIORITY_TRIVIAL, 'NEED_REVIEW': PRIORITY_NEED_REVIEW, 'FEATURE': PRIORITY_FEATURE, 'WISHLIST': PRIORITY_WISHLIST, 'INVALID': PRIORITY_INVALID, 'NOT_MY_FAULT': PRIORITY_NOT_MY_FAULT, }, 'BUGTRACKER_DIFFICULTY': { 'DESIGN_ERRORS': DIFFICULTY_DESIGN_ERRORS, 'IMPORTANT': DIFFICULTY_IMPORTANT, 'NORMAL': DIFFICULTY_NORMAL, 'LOW_IMPACT': DIFFICULTY_LOW_IMPACT, 'OPTIONAL': DIFFICULTY_OPTIONAL, }, })
agpl-3.0
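For context, a context processor like the one tested above is just a function of the request that returns a dict, which Django merges into every RequestContext. A minimal hypothetical sketch plus how it would be registered (module path and names are illustrative):

def my_constants(request):
    # every template rendered with a RequestContext sees these names
    return {'MY_STATUS': {'OPEN': 1, 'CLOSED': 2}}

# settings.py (illustrative dotted path, pre-1.8 Django style):
# TEMPLATE_CONTEXT_PROCESSORS += ('myapp.context_processors.my_constants',)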
ccarrascal/drupal-commerce
sites/all/libraries/elfinder/src/connectors/python/connector.py
74
2687
#!/usr/bin/env python import cgi try: import json except ImportError: import simplejson as json import elFinder # configure connector options opts = { #'root': '/home/troex/Sites/git/elfinder/files', 'root': '../git/elfinder/files/', 'URL': 'http://localhost:8001/~troex/git/elfinder/files', ## other options 'debug': True, 'fileURL': True, # download files using connector, no direct urls to files # 'dirSize': True, # 'dotFiles': True, # 'perms': { # 'backup': { # 'read': True, # 'write': False, # 'rm': False # }, # '^/pics': { # 'read': True, # 'write': False, # 'rm': False # } # }, # 'uploadDeny': ['image', 'application'], # 'uploadAllow': ['image/png', 'image/jpeg'], # 'uploadOrder': ['deny', 'allow'] # 'disabled': ['rename', 'quicklook', 'upload'] } # init connector and pass options elf = elFinder.connector(opts) # fetch only needed GET/POST parameters httpRequest = {} form = cgi.FieldStorage() for field in elf.httpAllowedParameters: if field in form: httpRequest[field] = form.getvalue(field) if field == 'upload[]': upFiles = {} cgiUploadFiles = form['upload[]'] for up in cgiUploadFiles: if up.filename: upFiles[up.filename] = up.file # pack dict(filename: filedescriptor) httpRequest['upload[]'] = upFiles # run connector with parameters status, header, response = elf.run(httpRequest) # get connector output and print it out # code below is tested with apache only (maybe other server need other method?) if status == 200: print 'Status: 200' elif status == 403: print 'Status: 403' elif status == 404: print 'Status: 404' if len(header) >= 1: for h, v in header.iteritems(): print h + ': ' + v print if not response is None and status == 200: # send file if 'file' in response and isinstance(response['file'], file): print response['file'].read() response['file'].close() # output json else: print json.dumps(response, indent = True) ## logging #import sys #log = open('/home/troex/Sites/git/elfinder/files/out.log', 'w') #print >>log, 'FORM: ', form #log.close() ## another aproach ## get connector output and print it out #if elf.httpStatusCode == 200: # print 'HTTP/1.1 200 OK' #elif elf.httpStatusCode == 403: # print 'HTTP/1.x 403 Access Denied' #elif elf.httpStatusCode == 404: # print 'HTTP/1.x 404 Not Found' # #if len(elf.httpHeader) >= 1: # for header, value in elf.httpHeader.iteritems(): # print header + ': ' + value # print # #if not elf.httpResponse is None: # if isinstance(elf.httpResponse['file'], file): # print elf.httpResponse['file'].read() # elf.httpResponse['file'].close() # else: # print json.dumps(elf.httpResponse, indent = True) #
gpl-2.0
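The connector above copies only whitelisted GET/POST fields out of the CGI form before running elFinder. A self-contained sketch of that filtering step (the ALLOWED tuple here is a hypothetical subset; the real whitelist lives in elf.httpAllowedParameters):

import cgi

ALLOWED = ('cmd', 'target', 'targets[]', 'current', 'tree')  # illustrative

def collect_params(form, allowed=ALLOWED):
    request = {}
    for field in allowed:
        if field in form:
            request[field] = form.getvalue(field)
    return request

# usage: params = collect_params(cgi.FieldStorage())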
Ensembles/ert
python/python/ert/enkf/plot_data/ensemble_plot_gen_kw_vector.py
2
1627
# Copyright (C) 2014 Statoil ASA, Norway.
#
# The file 'ensemble_plot_gen_kw_vector.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.

from cwrap import BaseCClass
from ert.enkf import EnkfPrototype


class EnsemblePlotGenKWVector(BaseCClass):
    TYPE_NAME = "ensemble_plot_gen_kw_vector"

    _size = EnkfPrototype("int enkf_plot_gen_kw_vector_get_size(ensemble_plot_gen_kw_vector)")
    _get_value = EnkfPrototype("double enkf_plot_gen_kw_vector_iget(ensemble_plot_gen_kw_vector, int)")

    def __init__(self):
        raise NotImplementedError("Class can not be instantiated directly!")

    def __len__(self):
        """ @rtype: int """
        return self._size()

    def getValue(self, index):
        """ @rtype: float """
        return self[index]

    def __iter__(self):
        cur = 0
        while cur < len(self):
            yield self[cur]
            cur += 1

    def __getitem__(self, index):
        """ @rtype: float """
        return self._get_value(index)

    def __repr__(self):
        return 'EnsemblePlotGenKWVector(size = %d) %s' % (len(self), self._ad_str())
gpl-3.0
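EnsemblePlotGenKWVector above exposes a C-backed vector through Python's sequence protocol. The same pattern over a plain list, as a self-contained sketch:

class Vector(object):
    """Sketch: __len__ + __getitem__ plus the explicit-cursor __iter__ used above."""
    def __init__(self, values):
        self._values = list(values)

    def __len__(self):
        return len(self._values)

    def __getitem__(self, index):
        return self._values[index]

    def __iter__(self):
        cur = 0
        while cur < len(self):
            yield self[cur]
            cur += 1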
nrwahl2/ansible
lib/ansible/modules/network/cloudengine/ce_vrf_interface.py
27
15550
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: ce_vrf_interface version_added: "2.4" short_description: Manages interface specific VPN configuration on HUAWEI CloudEngine switches. description: - Manages interface specific VPN configuration of HUAWEI CloudEngine switches. author: Zhijin Zhou (@CloudEngine-Ansible) notes: - Ensure that a VPN instance has been created and the IPv4 address family has been enabled for the VPN instance. options: vrf: description: - VPN instance, the length of vrf name is 1 ~ 31, i.e. "test", but can not be C(_public_). required: true vpn_interface: description: - An interface that can binding VPN instance, i.e. 40GE1/0/22, Vlanif10. Must be fully qualified interface name. Interface types, such as 10GE, 40GE, 100GE, LoopBack, MEth, Tunnel, Vlanif.... required: true state: description: - Manage the state of the resource. required: false choices: ['present','absent'] default: present ''' EXAMPLES = ''' - name: VRF interface test hosts: cloudengine connection: local gather_facts: no vars: cli: host: "{{ inventory_hostname }}" port: "{{ ansible_ssh_port }}" username: "{{ username }}" password: "{{ password }}" transport: cli tasks: - name: "Configure a VPN instance for the interface" ce_vrf_interface: vpn_interface: 40GE1/0/2 vrf: test state: present provider: "{{ cli }}" - name: "Disable the association between a VPN instance and an interface" ce_vrf_interface: vpn_interface: 40GE1/0/2 vrf: test state: absent provider: "{{ cli }}" ''' RETURN = ''' proposed: description: k/v pairs of parameters passed into module returned: verbose mode type: dict sample: { "state": "present", "vpn_interface": "40GE2/0/17", "vrf": "jss" } existing: description: k/v pairs of existing attributes on the interface returned: verbose mode type: dict sample: { "vpn_interface": "40GE2/0/17", "vrf": null } end_state: description: k/v pairs of end attributes on the interface returned: verbose mode type: dict sample: { "vpn_interface": "40GE2/0/17", "vrf": "jss" } updates: description: command list sent to the device returned: always type: list sample: [ "ip binding vpn-instance jss", ] changed: description: check to see if a change was made on the device returned: always type: boolean sample: true ''' from xml.etree import ElementTree from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ce import ce_argument_spec, get_nc_config, set_nc_config CE_NC_GET_VRF = """ <filter type="subtree"> <l3vpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <l3vpncomm> <l3vpnInstances> <l3vpnInstance> <vrfName>%s</vrfName> </l3vpnInstance> </l3vpnInstances> </l3vpncomm> </l3vpn> </filter> """ CE_NC_GET_VRF_INTERFACE = """ <filter type="subtree"> <l3vpn xmlns="http://www.huawei.com/netconf/vrp" 
    <l3vpncomm>
      <l3vpnInstances>
        <l3vpnInstance>
          <vrfName></vrfName>
          <l3vpnIfs>
            <l3vpnIf>
              <ifName></ifName>
            </l3vpnIf>
          </l3vpnIfs>
        </l3vpnInstance>
      </l3vpnInstances>
    </l3vpncomm>
  </l3vpn>
</filter>
"""

CE_NC_MERGE_VRF_INTERFACE = """
<config>
  <l3vpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
    <l3vpncomm>
      <l3vpnInstances>
        <l3vpnInstance>
          <vrfName>%s</vrfName>
          <l3vpnIfs>
            <l3vpnIf operation="merge">
              <ifName>%s</ifName>
            </l3vpnIf>
          </l3vpnIfs>
        </l3vpnInstance>
      </l3vpnInstances>
    </l3vpncomm>
  </l3vpn>
</config>
"""

CE_NC_GET_INTF = """
<filter type="subtree">
  <ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
    <interfaces>
      <interface>
        <ifName>%s</ifName>
        <isL2SwitchPort></isL2SwitchPort>
      </interface>
    </interfaces>
  </ifm>
</filter>
"""

CE_NC_DEL_INTF_VPN = """
<config>
  <l3vpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
    <l3vpncomm>
      <l3vpnInstances>
        <l3vpnInstance>
          <vrfName>%s</vrfName>
          <l3vpnIfs>
            <l3vpnIf operation="delete">
              <ifName>%s</ifName>
            </l3vpnIf>
          </l3vpnIfs>
        </l3vpnInstance>
      </l3vpnInstances>
    </l3vpncomm>
  </l3vpn>
</config>
"""


def get_interface_type(interface):
    """Gets the type of interface, such as 10GE, ETH-TRUNK, VLANIF..."""

    if interface is None:
        return None

    iftype = None

    if interface.upper().startswith('GE'):
        iftype = 'ge'
    elif interface.upper().startswith('10GE'):
        iftype = '10ge'
    elif interface.upper().startswith('25GE'):
        iftype = '25ge'
    elif interface.upper().startswith('4X10GE'):
        iftype = '4x10ge'
    elif interface.upper().startswith('40GE'):
        iftype = '40ge'
    elif interface.upper().startswith('100GE'):
        iftype = '100ge'
    elif interface.upper().startswith('VLANIF'):
        iftype = 'vlanif'
    elif interface.upper().startswith('LOOPBACK'):
        iftype = 'loopback'
    elif interface.upper().startswith('METH'):
        iftype = 'meth'
    elif interface.upper().startswith('ETH-TRUNK'):
        iftype = 'eth-trunk'
    elif interface.upper().startswith('VBDIF'):
        iftype = 'vbdif'
    elif interface.upper().startswith('NVE'):
        iftype = 'nve'
    elif interface.upper().startswith('TUNNEL'):
        iftype = 'tunnel'
    elif interface.upper().startswith('ETHERNET'):
        iftype = 'ethernet'
    elif interface.upper().startswith('FCOE-PORT'):
        iftype = 'fcoe-port'
    elif interface.upper().startswith('FABRIC-PORT'):
        iftype = 'fabric-port'
    elif interface.upper().startswith('STACK-PORT'):
        iftype = 'stack-Port'
    elif interface.upper().startswith('NULL'):
        iftype = 'null'
    else:
        return None

    return iftype.lower()


class VrfInterface(object):
    """Manage the VPN instance bound to an interface"""

    def __init__(self, argument_spec):
        self.spec = argument_spec
        self.module = None
        self.init_module()

        # vpn instance info
        self.vrf = self.module.params['vrf']
        self.vpn_interface = self.module.params['vpn_interface']
        self.vpn_interface = self.vpn_interface.upper().replace(' ', '')
        self.state = self.module.params['state']
        self.intf_info = dict()
        self.intf_info['isL2SwitchPort'] = None
        self.intf_info['vrfName'] = None
        self.conf_exist = False

        # state
        self.changed = False
        self.updates_cmd = list()
        self.results = dict()
        self.proposed = dict()
        self.existing = dict()
        self.end_state = dict()

    def init_module(self):
        """init_module"""

        required_one_of = [("vrf", "vpn_interface")]
        self.module = AnsibleModule(
            argument_spec=self.spec, required_one_of=required_one_of, supports_check_mode=True)

    def check_response(self, xml_str, xml_name):
        """Fail the module if the response message does not indicate success."""

        if "<ok/>" not in xml_str:
            self.module.fail_json(msg='Error: %s failed.' % xml_name)

    def get_update_cmd(self):
        """Get the list of update commands"""

        if self.conf_exist:
            return

        if self.state == 'absent':
            self.updates_cmd.append(
                "undo ip binding vpn-instance %s" % self.vrf)
            return

        if self.vrf != self.intf_info['vrfName']:
            self.updates_cmd.append("ip binding vpn-instance %s" % self.vrf)

        return

    def check_params(self):
        """Check all input params"""

        if not self.is_vrf_exist():
            self.module.fail_json(
                msg='Error: The VPN instance does not exist.')

        if self.state == 'absent':
            if self.vrf != self.intf_info['vrfName']:
                self.module.fail_json(
                    msg='Error: The VPN instance is not bound to the interface.')

        if self.intf_info['isL2SwitchPort'] == 'true':
            self.module.fail_json(
                msg='Error: An L2 switch port cannot be bound to a VPN instance.')

        # interface type check
        if self.vpn_interface:
            intf_type = get_interface_type(self.vpn_interface)
            if not intf_type:
                self.module.fail_json(
                    msg='Error: invalid interface name: %s.' % self.vpn_interface)

        # vrf check
        if self.vrf == '_public_':
            self.module.fail_json(
                msg='Error: The vrf name _public_ is reserved.')
        if len(self.vrf) < 1 or len(self.vrf) > 31:
            self.module.fail_json(
                msg='Error: The vrf name length must be between 1 and 31.')

    def get_interface_vpn_name(self, vpninfo, vpn_name):
        """Get the name of the VPN instance bound to the interface"""

        l3vpn_if = vpninfo.findall("l3vpnIf")
        for l3vpn_ifinfo in l3vpn_if:
            for ele in l3vpn_ifinfo:
                if ele.tag in ['ifName']:
                    if ele.text == self.vpn_interface:
                        self.intf_info['vrfName'] = vpn_name

    def get_interface_vpn(self):
        """Get the VPN instance associated with the interface"""

        xml_str = CE_NC_GET_VRF_INTERFACE
        con_obj = get_nc_config(self.module, xml_str)
        if "<data/>" in con_obj:
            return

        xml_str = con_obj.replace('\r', '').replace('\n', '').\
            replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
            replace('xmlns="http://www.huawei.com/netconf/vrp"', "")

        # get global vrf interface info
        root = ElementTree.fromstring(xml_str)
        vpns = root.findall(
            "data/l3vpn/l3vpncomm/l3vpnInstances/l3vpnInstance")
        if vpns:
            for vpnele in vpns:
                vpn_name = None
                for vpninfo in vpnele:
                    if vpninfo.tag == 'vrfName':
                        vpn_name = vpninfo.text
                    if vpninfo.tag == 'l3vpnIfs':
                        self.get_interface_vpn_name(vpninfo, vpn_name)

        return

    def is_vrf_exist(self):
        """Check whether the VPN instance exists"""

        conf_str = CE_NC_GET_VRF % self.vrf
        con_obj = get_nc_config(self.module, conf_str)
        if "<data/>" in con_obj:
            return False

        return True

    def get_intf_conf_info(self):
        """Get the related configuration of the interface"""

        conf_str = CE_NC_GET_INTF % self.vpn_interface
        con_obj = get_nc_config(self.module, conf_str)
        if "<data/>" in con_obj:
            return

        # get interface base info
        xml_str = con_obj.replace('\r', '').replace('\n', '').\
            replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
            replace('xmlns="http://www.huawei.com/netconf/vrp"', "")

        root = ElementTree.fromstring(xml_str)
        interface = root.find("data/ifm/interfaces/interface")
        if interface:
            for eles in interface:
                if eles.tag in ["isL2SwitchPort"]:
                    self.intf_info[eles.tag] = eles.text

        self.get_interface_vpn()
        return

    def get_existing(self):
        """Get the existing config"""

        self.existing = dict(vrf=self.intf_info['vrfName'],
                             vpn_interface=self.vpn_interface)

    def get_proposed(self):
        """get_proposed"""

        self.proposed = dict(vrf=self.vrf,
                             vpn_interface=self.vpn_interface,
                             state=self.state)

    def get_end_state(self):
        """get_end_state"""

        self.intf_info['vrfName'] = None
        self.get_intf_conf_info()

        self.end_state = dict(vrf=self.intf_info['vrfName'],
                              vpn_interface=self.vpn_interface)

    def show_result(self):
        """Show the result"""
        self.results['changed'] = self.changed
        self.results['proposed'] = self.proposed
        self.results['existing'] = self.existing
        self.results['end_state'] = self.end_state
        if self.changed:
            self.results['updates'] = self.updates_cmd
        else:
            self.results['updates'] = list()

        self.module.exit_json(**self.results)

    def judge_if_config_exist(self):
        """Check whether the requested configuration already exists"""

        if self.state == 'absent':
            return False

        delta = set(self.proposed.items()).difference(
            self.existing.items())
        delta = dict(delta)
        if len(delta) == 1 and delta['state']:
            return True

        return False

    def config_interface_vrf(self):
        """Configure the VPN instance of the interface"""

        if not self.conf_exist and self.state == 'present':

            xml_str = CE_NC_MERGE_VRF_INTERFACE % (
                self.vrf, self.vpn_interface)
            ret_xml = set_nc_config(self.module, xml_str)
            self.check_response(ret_xml, "VRF_INTERFACE_CONFIG")
            self.changed = True

        elif self.state == 'absent':
            xml_str = CE_NC_DEL_INTF_VPN % (self.vrf, self.vpn_interface)
            ret_xml = set_nc_config(self.module, xml_str)
            self.check_response(ret_xml, "DEL_VRF_INTERFACE_CONFIG")
            self.changed = True

    def work(self):
        """Execute the task"""

        self.get_intf_conf_info()
        self.check_params()
        self.get_existing()
        self.get_proposed()
        self.conf_exist = self.judge_if_config_exist()

        self.config_interface_vrf()

        self.get_update_cmd()
        self.get_end_state()
        self.show_result()


def main():
    """main"""

    argument_spec = dict(
        vrf=dict(required=True, type='str'),
        vpn_interface=dict(required=True, type='str'),
        state=dict(choices=['absent', 'present'],
                   default='present', required=False),
    )
    argument_spec.update(ce_argument_spec)
    vrf_intf = VrfInterface(argument_spec)
    vrf_intf.work()


if __name__ == '__main__':
    main()
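
# Illustrative calls for get_interface_type() above (a sketch, not part of
# the original module; the interface names are hypothetical):
#
#   get_interface_type('40GE1/0/22')   # -> '40ge'
#   get_interface_type('Vlanif10')     # -> 'vlanif'
#   get_interface_type('Foo0/0/1')     # -> None (unsupported prefix)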
gpl-3.0
codedsk/hubcheck-hubzero-tests
hchztests/tests/test_website_support_need_help.py
1
7124
import pytest
import sys
import os
import re
import hubcheck


pytestmark = [ pytest.mark.website,
               pytest.mark.tickets,
               pytest.mark.need_help,
               pytest.mark.reboot,
               pytest.mark.upgrade,
               pytest.mark.prod_safe_upgrade
             ]


class TestNeedHelp(hubcheck.testcase.TestCase2):

    def setup_method(self,method):

        # setup a web browser
        self.browser.get(self.https_authority)

        # get user account info
        self.username,self.password = \
            self.testdata.find_account_for('ticketsubmitter')
        self.adminuser,self.adminpass = \
            self.testdata.find_account_for('ticketmanager')

        self.ticket_number = None

    def teardown_method(self,method):

        # if we created a ticket, delete the ticket
        if self.ticket_number is not None \
           and (self.adminuser != "") \
           and (self.adminpass != ""):

            try:
                self.utils.account.logout()
            except:
                pass

            self.utils.account.login_as(self.adminuser,self.adminpass)
            self.utils.support.close_support_ticket_invalid(self.ticket_number)

    def test_link_exists(self):
        """
        click the need help link to see if the widget exists
        """

        po = self.catalog.load_pageobject('SupportNeedHelpPage')
        po.open()
        po.close()

    @pytest.mark.nt
    def test_link_changes_webpage(self):
        """
        click the need help link and check that the url does not change
        """

        po = self.catalog.load_pageobject('GenericPage')
        start_url = po.current_url()
        po.toggle_needhelp()
        end_url = po.current_url()

        assert start_url == end_url, "clicking the 'Need Help?' link" \
            + " changed the web page from %s to %s" % (start_url,end_url)

    def test_if_link_leads_to_support_url(self):
        """
        open the "Need Help?" dialogue to ensure it does not lead to /support

        Sometimes seen when JavaScript is turned off; with JavaScript on,
        clicking this link should not send the user to the /support webpage.
        """

        # store the start and end page URLs for comparison
        # click the needhelp link and see if it takes us to /support

        po = self.catalog.load_pageobject('SupportNeedHelpPage')
        startpageurl = po.current_url()
        po.open()
        endpageurl = po.current_url()

        assert startpageurl == endpageurl, \
            "User was redirected to %s\n" % endpageurl

        # FIXME: use urlparse here

        # create a pattern for a url regular expression
        p = re.compile('(([^:]+)://)?([^:/]+)(:([0-9]+))?(/.*)?')
        (junk, junk, junk, junk, junk, path) = p.search(endpageurl).groups()

        # check that the page we were taken to is not /support
        s = "pageurl = %s\npath = %s\n" % (endpageurl,path)
        assert path != '/support', s

    def test_submit_ticket_logged_in_using_need_help_link(self):
        """
        login to the website as the "ticket submitter" and submit a
        ticket using the need help link.
        """

        problem = 'hubcheck test ticket\n%s' % (self.fnbase)

        # login to the website and click the need help link
        self.utils.account.login_as(self.username,self.password)
        po = self.catalog.load_pageobject('SupportNeedHelpPage')
        po.open()

        # fill in the trouble report
        # username, name, and email fields are
        # not accessible while logged in
        self.ticket_number = po.submit_ticket({'problem':problem})

        # check if the ticket number is a valid number
        assert self.ticket_number is not None, "no ticket number returned"
        assert re.match('\d+',self.ticket_number) is not None, \
            "could not find a matching ticket number in '%s'" \
            % (self.ticket_number)

        # convert to a number and ensure it is not ticket #0
        assert int(self.ticket_number) > 0, \
            "invalid ticket number returned: %s" % (self.ticket_number)

    @pytest.mark.captcha
    def test_submit_ticket_logged_out_using_need_help_link(self):
        """
        submit a support ticket using the need help link while not
        logged into the website.
        """

        # data for trouble report
        data = {
            'name'    : 'hubcheck testuser',
            'email'   : '[email protected]',
            'problem' : 'hubcheck test ticket\n%s' % (self.fnbase),
            'captcha' : True,
        }

        # navigate to the SupportNeedHelp Page:
        po = self.catalog.load_pageobject('SupportNeedHelpPage')
        po.open()

        # fill in the trouble report
        # username is optional
        self.ticket_number = po.submit_ticket(data)

        # check if the ticket number is a valid number
        assert self.ticket_number is not None, \
            "no ticket number returned"
        assert re.match('\d+',self.ticket_number) is not None, \
            "could not find a matching ticket number in '%s'" \
            % (self.ticket_number)

        # convert to a number and ensure it is not ticket #0
        assert int(self.ticket_number) > 0, \
            "invalid ticket number returned: %s" % (self.ticket_number)

    @pytest.mark.tickets_attach_jpg
    def test_attaching_jpg_image_to_ticket_submitted_through_need_help(self):
        """
        Login to the website and submit a ticket, using the need help
        link, with an attached jpeg image.
        """

        problem = 'hubcheck test ticket\nattaching jpg image\n%s' \
            % (self.fnbase)
        uploadfilename = 'app2.jpg'
        uploadfilepath = os.path.join(self.datadir,'images',uploadfilename)
        data = {
            'problem' : problem,
            'upload'  : uploadfilepath,
        }

        # login to the website and navigate to the need help form
        self.utils.account.login_as(self.username,self.password)
        po = self.catalog.load_pageobject('SupportNeedHelpPage')
        # po.open()
        po.needhelplink.click()

        # submit a trouble report
        # username, name, and email fields are not accessible
        self.ticket_number = po.submit_ticket(data)

        assert self.ticket_number is not None, "no ticket number returned"
        assert int(self.ticket_number) > 0, \
            "invalid ticket number returned: %s" % (self.ticket_number)

        po.goto_ticket()

        po = self.catalog.load_pageobject('SupportTicketViewPage')
        content = po.get_ticket_content()
        imgsrc = content.download_image(uploadfilename)

        # Not sure how to really download image files yet,
        # so we assume that as long as opening the image didn't
        # cause an error, the test passed.
        assert re.search(uploadfilename,imgsrc) is not None, \
            "After uploading an image to support ticket" \
            + " #%s, could not download image %s" \
            % (self.ticket_number,uploadfilename)
mit
KaranToor/MA450
google-cloud-sdk/.install/.backup/platform/gsutil/third_party/boto/tests/unit/ec2/test_address.py
109
5950
from tests.compat import mock, unittest from boto.ec2.address import Address class AddressTest(unittest.TestCase): def setUp(self): self.address = Address() self.address.connection = mock.Mock() self.address.public_ip = "192.168.1.1" def check_that_attribute_has_been_set(self, name, value, attribute): self.address.endElement(name, value, None) self.assertEqual(getattr(self.address, attribute), value) def test_endElement_sets_correct_attributes_with_values(self): for arguments in [("publicIp", "192.168.1.1", "public_ip"), ("instanceId", 1, "instance_id"), ("domain", "some domain", "domain"), ("allocationId", 1, "allocation_id"), ("associationId", 1, "association_id"), ("somethingRandom", "somethingRandom", "somethingRandom")]: self.check_that_attribute_has_been_set(arguments[0], arguments[1], arguments[2]) def test_release_calls_connection_release_address_with_correct_args(self): self.address.release() self.address.connection.release_address.assert_called_with( public_ip="192.168.1.1", dry_run=False ) def test_associate_calls_connection_associate_address_with_correct_args(self): self.address.associate(1) self.address.connection.associate_address.assert_called_with( instance_id=1, public_ip="192.168.1.1", allow_reassociation=False, network_interface_id=None, private_ip_address=None, dry_run=False ) def test_disassociate_calls_connection_disassociate_address_with_correct_args(self): self.address.disassociate() self.address.connection.disassociate_address.assert_called_with( public_ip="192.168.1.1", dry_run=False ) class AddressWithAllocationTest(unittest.TestCase): def setUp(self): self.address = Address() self.address.connection = mock.Mock() self.address.public_ip = "192.168.1.1" self.address.allocation_id = "aid1" def check_that_attribute_has_been_set(self, name, value, attribute): self.address.endElement(name, value, None) self.assertEqual(getattr(self.address, attribute), value) def test_endElement_sets_correct_attributes_with_values(self): for arguments in [("publicIp", "192.168.1.1", "public_ip"), ("instanceId", 1, "instance_id"), ("domain", "some domain", "domain"), ("allocationId", 1, "allocation_id"), ("associationId", 1, "association_id"), ("somethingRandom", "somethingRandom", "somethingRandom")]: self.check_that_attribute_has_been_set(arguments[0], arguments[1], arguments[2]) def test_release_calls_connection_release_address_with_correct_args(self): self.address.release() self.address.connection.release_address.assert_called_with( allocation_id="aid1", dry_run=False ) def test_associate_calls_connection_associate_address_with_correct_args(self): self.address.associate(1) self.address.connection.associate_address.assert_called_with( instance_id=1, public_ip="192.168.1.1", allocation_id="aid1", network_interface_id=None, private_ip_address=None, allow_reassociation=False, dry_run=False ) def test_disassociate_calls_connection_disassociate_address_with_correct_args(self): self.address.disassociate() self.address.connection.disassociate_address.assert_called_with( public_ip="192.168.1.1", dry_run=False ) class AddressWithNetworkInterfaceTest(unittest.TestCase): def setUp(self): self.address = Address() self.address.connection = mock.Mock() self.address.public_ip = "192.168.1.1" self.address.allocation_id = "aid1" def check_that_attribute_has_been_set(self, name, value, attribute): self.address.endElement(name, value, None) self.assertEqual(getattr(self.address, attribute), value) def test_endElement_sets_correct_attributes_with_values(self): for arguments in [("publicIp", 
"192.168.1.1", "public_ip"), ("instanceId", 1, "instance_id"), ("domain", "some domain", "domain"), ("allocationId", 1, "allocation_id"), ("associationId", 1, "association_id"), ("somethingRandom", "somethingRandom", "somethingRandom")]: self.check_that_attribute_has_been_set(arguments[0], arguments[1], arguments[2]) def test_release_calls_connection_release_address_with_correct_args(self): self.address.release() self.address.connection.release_address.assert_called_with( allocation_id="aid1", dry_run=False ) def test_associate_calls_connection_associate_address_with_correct_args(self): self.address.associate(network_interface_id=1) self.address.connection.associate_address.assert_called_with( instance_id=None, public_ip="192.168.1.1", network_interface_id=1, private_ip_address=None, allocation_id="aid1", allow_reassociation=False, dry_run=False ) def test_disassociate_calls_connection_disassociate_address_with_correct_args(self): self.address.disassociate() self.address.connection.disassociate_address.assert_called_with( public_ip="192.168.1.1", dry_run=False ) if __name__ == "__main__": unittest.main()
apache-2.0
YongseopKim/crosswalk-test-suite
tools/apkanalyser/comm.py
3
2140
# -*- coding: utf-8 -*- #!/usr/bin/env python # Copyright (C) 2015 Intel Corporation. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of Intel Corporation nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Author: [email protected] import os, sys import re, codecs import shutil, glob def _find(pathname, matchFunc=os.path.isfile): for dirname in sys.path: candidate = os.path.join(dirname, pathname) if matchFunc(candidate): return candidate def mk_dir(path): if not find_dir(path): os.mkdir(path) def find_file(pathname): return _find(pathname) def find_dir(path): return _find(path, matchFunc=os.path.isdir) def find_glob_path(filepath): return glob.glob(filepath)
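
# Usage sketch (illustrative only, not part of the original helpers; results
# depend on the interpreter's sys.path, and the names below are hypothetical):
#
#   find_file('os.py')           # -> full path of the first match on sys.path, or None
#   find_dir('encodings')        # -> first matching directory on sys.path, or None
#   find_glob_path('/tmp/*.apk') # -> list of matching paths (possibly empty)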
bsd-3-clause
jasper-meyer/Platformer
platformer.py
1
3751
""" platformer.py Author: Jasper Meyer Credit: You, the internet, Brendan Assignment: Write and submit a program that implements the sandbox platformer game: https://github.com/HHS-IntroProgramming/Platformer """ from ggame import App, RectangleAsset, ImageAsset, Sprite, LineStyle, Color, Frame SCREEN_WIDTH = 1080 SCREEN_HEIGHT = 720 myapp = App(SCREEN_WIDTH, SCREEN_HEIGHT) black = Color(0, 1) backcol = Color(0xd9ffcc, 1.0) purp = Color(0x9900cc, 1.0) blue = Color(0x3399ff,1.0) noline = LineStyle(0, black) bg_asset = RectangleAsset(SCREEN_WIDTH, SCREEN_HEIGHT, noline, backcol) bg = Sprite(bg_asset, (0,0)) thinline = LineStyle(1, black) sq = RectangleAsset (75,75, noline, black) wub=0 pup=0 mousex=0 mousey=0 mousexround=0 mouseyround=0 play = RectangleAsset (25,50, noline, purp) spr = RectangleAsset (20,10, noline, blue) vy=0 player=0 acc = 0 ti = 0 rupx=0 lupx=0 vx=0 up=0 upup=0 stop = 0 shutup=0 spring = 0 sub = 0 springlist = [] def wup(event): global wub global mousexround global mouseyround wub = 1 if wub == 1: mousexround=mousex-((mousex)%75) mouseyround=mousey-((mousey)%75) block = Sprite (sq, (mousexround, mouseyround)) def mousemo(event): global mousex global mousey mousex=event.x mousey=event.y def spri(event): global spring global mousex global mousey global mouseyround global sub global springlist sub =1 if sub == 1: mouseyround=mousey-((mousey)%75)+65 springlist.append (Sprite (spr, (mousex, mouseyround))) def pup(event): global pub global mousex global mouseyround global player pub = 1 if pub == 1: mouseyround=mousey-((mousey)%75)+25 if player == 0: player = Sprite (play, (mousex, mouseyround)) def rup(event): global rupx rupx=1 def lup(event): global lupx lupx=1 def uup(event): global up up=1 def step(): if player != 0: global vy global acc global ti global rupx global vx global lupx global up global upup global stop global shutup global springlist global player acc = 0.02 for s in springlist: if player.collidingWith(s): vy=-50+vy vx=-vx if stop == 0: ti=ti+.5 if upup==4.5: vy = (0.2*ti)-upup else: vy = (0.2*ti) player.y=player.y+vy player.x=player.x+vx if rupx == 1: vx=vx+1.5 lupx=0 rupx=0 if lupx == 1: vx=vx-1.5 rupx=0 lupx=0 if vx > 3: vx = 3 if vx < -3: vx =-3 if up == 1: upup = 4.5 up=0 if up == 0: upup =4.5 col = player.collidingWithSprites(Sprite) if len(col) > 1 and col[1].y<player.y+500: stop=1 player.y=player.y-0.2 else: stop=0 if stop == 1: vy=0 ti=0 if len(col) > 1: if col[1].y<player.y+50: vx=-0.5*vx if player.y > 2000: player = 0 ti=0 myapp.listenKeyEvent('keyup', 's', spri) myapp.listenKeyEvent('keydown', 'up arrow', uup) myapp.listenKeyEvent('keydown', 'left arrow', lup) myapp.listenKeyEvent('keydown', 'right arrow', rup) myapp.listenKeyEvent('keyup', 'p', pup) myapp.listenKeyEvent('keyup', 'w', wup) myapp.listenMouseEvent('mousemove', mousemo) myapp.run(step)
mit
xpansa/server-tools
fetchmail_attach_from_folder/match_algorithm/__init__.py
54
1115
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # This module copyright (C) 2013 Therp BV (<http://therp.nl>) # All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import base from . import email_exact from . import email_domain from . import openerp_standard
agpl-3.0
simplegeo/eventlet
eventlet/hubs/pyevent.py
13
5455
import sys import traceback import event import types from eventlet.support import greenlets as greenlet from eventlet.hubs.hub import BaseHub, FdListener, READ, WRITE class event_wrapper(object): def __init__(self, impl=None, seconds=None): self.impl = impl self.seconds = seconds def __repr__(self): if self.impl is not None: return repr(self.impl) else: return object.__repr__(self) def __str__(self): if self.impl is not None: return str(self.impl) else: return object.__str__(self) def cancel(self): if self.impl is not None: self.impl.delete() self.impl = None @property def pending(self): return bool(self.impl and self.impl.pending()) class Hub(BaseHub): SYSTEM_EXCEPTIONS = (KeyboardInterrupt, SystemExit) def __init__(self): super(Hub,self).__init__() event.init() self.signal_exc_info = None self.signal( 2, lambda signalnum, frame: self.greenlet.parent.throw(KeyboardInterrupt)) self.events_to_add = [] def dispatch(self): loop = event.loop while True: for e in self.events_to_add: if e is not None and e.impl is not None and e.seconds is not None: e.impl.add(e.seconds) e.seconds = None self.events_to_add = [] result = loop() if getattr(event, '__event_exc', None) is not None: # only have to do this because of bug in event.loop t = getattr(event, '__event_exc') setattr(event, '__event_exc', None) assert getattr(event, '__event_exc') is None raise t[0], t[1], t[2] if result != 0: return result def run(self): while True: try: self.dispatch() except greenlet.GreenletExit: break except self.SYSTEM_EXCEPTIONS: raise except: if self.signal_exc_info is not None: self.schedule_call_global( 0, greenlet.getcurrent().parent.throw, *self.signal_exc_info) self.signal_exc_info = None else: self.squelch_timer_exception(None, sys.exc_info()) def abort(self, wait=True): self.schedule_call_global(0, self.greenlet.throw, greenlet.GreenletExit) if wait: assert self.greenlet is not greenlet.getcurrent(), "Can't abort with wait from inside the hub's greenlet." 
self.switch() def _getrunning(self): return bool(self.greenlet) def _setrunning(self, value): pass # exists for compatibility with BaseHub running = property(_getrunning, _setrunning) def add(self, evtype, fileno, real_cb): # this is stupid: pyevent won't call a callback unless it's a function, # so we have to force it to be one here if isinstance(real_cb, types.BuiltinMethodType): def cb(_d): real_cb(_d) else: cb = real_cb if evtype is READ: evt = event.read(fileno, cb, fileno) elif evtype is WRITE: evt = event.write(fileno, cb, fileno) return super(Hub,self).add(evtype, fileno, evt) def signal(self, signalnum, handler): def wrapper(): try: handler(signalnum, None) except: self.signal_exc_info = sys.exc_info() event.abort() return event_wrapper(event.signal(signalnum, wrapper)) def remove(self, listener): super(Hub, self).remove(listener) listener.cb.delete() def remove_descriptor(self, fileno): for lcontainer in self.listeners.itervalues(): listener = lcontainer.pop(fileno, None) if listener: try: listener.cb.delete() except self.SYSTEM_EXCEPTIONS: raise except: traceback.print_exc() def schedule_call_local(self, seconds, cb, *args, **kwargs): current = greenlet.getcurrent() if current is self.greenlet: return self.schedule_call_global(seconds, cb, *args, **kwargs) event_impl = event.event(_scheduled_call_local, (cb, args, kwargs, current)) wrapper = event_wrapper(event_impl, seconds=seconds) self.events_to_add.append(wrapper) return wrapper schedule_call = schedule_call_local def schedule_call_global(self, seconds, cb, *args, **kwargs): event_impl = event.event(_scheduled_call, (cb, args, kwargs)) wrapper = event_wrapper(event_impl, seconds=seconds) self.events_to_add.append(wrapper) return wrapper def _version_info(self): baseversion = event.__version__ return baseversion def _scheduled_call(event_impl, handle, evtype, arg): cb, args, kwargs = arg try: cb(*args, **kwargs) finally: event_impl.delete() def _scheduled_call_local(event_impl, handle, evtype, arg): cb, args, kwargs, caller_greenlet = arg try: if not caller_greenlet.dead: cb(*args, **kwargs) finally: event_impl.delete()
mit
sourcelair/ceryx
ceryx/tests/client/connection.py
2
1957
from urllib3.connection import HTTPConnection, HTTPSConnection import os import socket DEFAULT_CERYX_HOST = "ceryx" # Set by Docker Compose in tests CERYX_HOST = os.getenv("CERYX_HOST", DEFAULT_CERYX_HOST) class CeryxTestsHTTPConnection(HTTPConnection): """ Custom-built HTTPConnection for Ceryx tests. Force sets the request's host to the configured Ceryx host, if the request's original host ends with `.ceryx.test`. """ @property def host(self): """ Do what the original property did. We just want to touch the setter. """ return self._dns_host.rstrip('.') @host.setter def host(self, value): """ If the request header ends with `.ceryx.test` then force set the actual host to the configured Ceryx host, so as to send corresponding requests to Ceryx. """ self._dns_host = CERYX_HOST if value.endswith(".ceryx.test") else value class CeryxTestsHTTPSConnection(CeryxTestsHTTPConnection, HTTPSConnection): def __init__( self, host, port=None, key_file=None, cert_file=None, key_password=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, ssl_context=None, server_hostname=None, **kw, ): # Initialise the HTTPConnection subclass created above. CeryxTestsHTTPConnection.__init__( self, host, port, strict=strict, timeout=timeout, **kw, ) self.key_file = key_file self.cert_file = cert_file self.key_password = key_password self.ssl_context = ssl_context self.server_hostname = server_hostname # ------------------------------ # Original comment from upstream # ------------------------------ # # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. (See Issue #356) self._protocol = 'https'
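
# A minimal usage sketch (an assumption for illustration, not part of the
# original test client): wiring the custom connection class into a urllib3
# pool so that a request for any "*.ceryx.test" host opens its TCP
# connection to CERYX_HOST while the request URL still names the
# ".ceryx.test" host. The pool host below is made up.
#
#   import urllib3
#   pool = urllib3.HTTPConnectionPool("myroute.ceryx.test", port=80)
#   pool.ConnectionCls = CeryxTestsHTTPConnection
#   response = pool.request("GET", "/")  # socket actually connects to CERYX_HOST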
mit
Ophiuchus1312/enigma2-master
lib/python/Screens/TimerEdit.py
1
20176
from Components.ActionMap import ActionMap from Components.Button import Button from Components.Label import Label from Components.config import config from Components.MenuList import MenuList from Components.TimerList import TimerList from Components.TimerSanityCheck import TimerSanityCheck from Components.UsageConfig import preferredTimerPath from Components.Sources.StaticText import StaticText from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT from Screens.Screen import Screen from Screens.ChoiceBox import ChoiceBox from Screens.MessageBox import MessageBox from ServiceReference import ServiceReference from Screens.TimerEntry import TimerEntry, TimerLog from Tools.BoundFunction import boundFunction from Tools.FuzzyDate import FuzzyTime from Tools.Directories import resolveFilename, SCOPE_HDD from time import time, localtime from timer import TimerEntry as RealTimerEntry from enigma import eServiceCenter import Tools.CopyFiles import os class TimerEditList(Screen): EMPTY = 0 ENABLE = 1 DISABLE = 2 CLEANUP = 3 DELETE = 4 def __init__(self, session): Screen.__init__(self, session) Screen.setTitle(self, _("Timer List")) self.onChangedEntry = [ ] list = [ ] self.list = list self.fillTimerList() self["timerlist"] = TimerList(list) self.key_red_choice = self.EMPTY self.key_yellow_choice = self.EMPTY self.key_blue_choice = self.EMPTY self["key_red"] = Button(" ") self["key_green"] = Button(_("Add")) self["key_yellow"] = Button(" ") self["key_blue"] = Button(" ") self["description"] = Label() self["actions"] = ActionMap(["OkCancelActions", "DirectionActions", "ShortcutActions", "TimerEditActions"], { "ok": self.openEdit, "cancel": self.leave, "green": self.addCurrentTimer, "log": self.showLog, "left": self.left, "right": self.right, "up": self.up, "down": self.down }, -1) self.setTitle(_("Timer overview")) self.session.nav.RecordTimer.on_state_change.append(self.onStateChange) self.onShown.append(self.updateState) def createSummary(self): return TimerEditListSummary def up(self): self["timerlist"].instance.moveSelection(self["timerlist"].instance.moveUp) self.updateState() def down(self): self["timerlist"].instance.moveSelection(self["timerlist"].instance.moveDown) self.updateState() def left(self): self["timerlist"].instance.moveSelection(self["timerlist"].instance.pageUp) self.updateState() def right(self): self["timerlist"].instance.moveSelection(self["timerlist"].instance.pageDown) self.updateState() def toggleDisabledState(self): cur=self["timerlist"].getCurrent() if cur: t = cur if t.disabled: # print "try to ENABLE timer" t.enable() timersanitycheck = TimerSanityCheck(self.session.nav.RecordTimer.timer_list, cur) if not timersanitycheck.check(): t.disable() print "Sanity check failed" simulTimerList = timersanitycheck.getSimulTimerList() if simulTimerList is not None: self.session.openWithCallback(self.finishedEdit, TimerSanityConflict, simulTimerList) else: print "Sanity check passed" if timersanitycheck.doubleCheck(): t.disable() else: if t.isRunning(): if t.repeated: list = ( (_("Stop current event but not coming events"), "stoponlycurrent"), (_("Stop current event and disable coming events"), "stopall"), (_("Don't stop current event but disable coming events"), "stoponlycoming") ) self.session.openWithCallback(boundFunction(self.runningEventCallback, t), ChoiceBox, title=_("Repeating event currently recording... 
What do you want to do?"), list = list) else: t.disable() self.session.nav.RecordTimer.timeChanged(t) self.refill() self.updateState() def runningEventCallback(self, t, result): if result is not None: if result[1] == "stoponlycurrent" or result[1] == "stopall": t.enable() t.processRepeated(findRunningEvent = False) self.session.nav.RecordTimer.doActivate(t) if result[1] == "stoponlycoming" or result[1] == "stopall": t.disable() self.session.nav.RecordTimer.timeChanged(t) self.refill() self.updateState() def removeAction(self, descr): actions = self["actions"].actions if descr in actions: del actions[descr] def updateState(self): cur = self["timerlist"].getCurrent() if cur: self["description"].setText(cur.description) if self.key_red_choice != self.DELETE: self["actions"].actions.update({"red":self.removeTimerQuestion}) self["key_red"].setText(_("Delete")) self.key_red_choice = self.DELETE if cur.disabled and (self.key_yellow_choice != self.ENABLE): self["actions"].actions.update({"yellow":self.toggleDisabledState}) self["key_yellow"].setText(_("Enable")) self.key_yellow_choice = self.ENABLE elif cur.isRunning() and not cur.repeated and (self.key_yellow_choice != self.EMPTY): self.removeAction("yellow") self["key_yellow"].setText(" ") self.key_yellow_choice = self.EMPTY elif ((not cur.isRunning())or cur.repeated ) and (not cur.disabled) and (self.key_yellow_choice != self.DISABLE): self["actions"].actions.update({"yellow":self.toggleDisabledState}) self["key_yellow"].setText(_("Disable")) self.key_yellow_choice = self.DISABLE else: if self.key_red_choice != self.EMPTY: self.removeAction("red") self["key_red"].setText(" ") self.key_red_choice = self.EMPTY if self.key_yellow_choice != self.EMPTY: self.removeAction("yellow") self["key_yellow"].setText(" ") self.key_yellow_choice = self.EMPTY showCleanup = True for x in self.list: if (not x[0].disabled) and (x[1] == True): break else: showCleanup = False if showCleanup and (self.key_blue_choice != self.CLEANUP): self["actions"].actions.update({"blue":self.cleanupQuestion}) self["key_blue"].setText(_("Cleanup")) self.key_blue_choice = self.CLEANUP elif (not showCleanup) and (self.key_blue_choice != self.EMPTY): self.removeAction("blue") self["key_blue"].setText(" ") self.key_blue_choice = self.EMPTY if len(self.list) == 0: return timer = self['timerlist'].getCurrent() if timer: try: name = str(timer.name) time = ("%s %s ... 
%s") % (FuzzyTime(timer.begin)[0], FuzzyTime(timer.begin)[1], FuzzyTime(timer.end)[1]) duration = ("(%d " + _("mins") + ")") % ((timer.end - timer.begin) / 60) service = str(timer.service_ref.getServiceName()) if timer.state == RealTimerEntry.StateWaiting: state = _("waiting") elif timer.state == RealTimerEntry.StatePrepared: state = _("about to start") elif timer.state == RealTimerEntry.StateRunning: if timer.justplay: state = _("zapped") else: state = _("recording...") elif timer.state == RealTimerEntry.StateEnded: state = _("done!") else: state = _("<unknown>") except: name = "" time = "" duration = "" service = "" else: name = "" time = "" duration = "" service = "" for cb in self.onChangedEntry: cb(name, time, duration, service, state) def fillTimerList(self): #helper function to move finished timers to end of list def eol_compare(x, y): if x[0].state != y[0].state and x[0].state == RealTimerEntry.StateEnded or y[0].state == RealTimerEntry.StateEnded: return cmp(x[0].state, y[0].state) return cmp(x[0].begin, y[0].begin) list = self.list print list del list[:] list.extend([(timer, False) for timer in self.session.nav.RecordTimer.timer_list]) list.extend([(timer, True) for timer in self.session.nav.RecordTimer.processed_timers]) if config.usage.timerlist_finished_timer_position.index: #end of list list.sort(cmp = eol_compare) else: list.sort(key = lambda x: x[0].begin) def showLog(self): cur=self["timerlist"].getCurrent() if cur: self.session.openWithCallback(self.finishedEdit, TimerLog, cur) def openEdit(self): cur=self["timerlist"].getCurrent() if cur: self.session.openWithCallback(self.finishedEdit, TimerEntry, cur) def cleanupQuestion(self): self.session.openWithCallback(self.cleanupTimer, MessageBox, _("Really delete done timers?")) def cleanupTimer(self, delete): if delete: self.session.nav.RecordTimer.cleanup() self.refill() self.updateState() def removeTimerQuestion(self): cur = self["timerlist"].getCurrent() service = str(cur.service_ref.getServiceName()) t = localtime(cur.begin) f = str(t.tm_year) + str(t.tm_mon).zfill(2) + str(t.tm_mday).zfill(2) + " " + str(t.tm_hour).zfill(2) + str(t.tm_min).zfill(2) + " - " + service + " - " + cur.name f = f.replace(':','_') f = f.replace(',','_') f = f.replace('/','_') if not cur: return onhdd = False self.moviename = f path = resolveFilename(SCOPE_HDD) files = os.listdir(path) for file in files: if file.startswith(f): onhdd = True break if onhdd: message = (_("Do you really want to delete %s?") % (cur.name)) choices = [(_("No"), "no"), (_("Yes, delete from Timerlist"), "yes"), (_("Yes, delete from Timerlist and delete recording"), "yesremove")] self.session.openWithCallback(self.startDelete, ChoiceBox, title=message, list=choices) else: self.session.openWithCallback(self.removeTimer, MessageBox, _("Do you really want to delete %s?") % (cur.name), default = False) def startDelete(self, answer): if not answer or not answer[1]: self.close() return if answer[1] == 'no': return elif answer[1] == 'yes': self.removeTimer(True) elif answer[1] == 'yesremove': if config.EMC.movie_trashcan_enable.getValue(): trashpath = config.EMC.movie_trashcan_path.getValue() self.MoveToTrash(trashpath) elif config.usage.movielist_trashcan.getValue(): trashpath = resolveFilename(SCOPE_HDD) + '.Trash' self.MoveToTrash(trashpath) else: self.session.openWithCallback(self.callbackRemoveRecording, MessageBox, _("Do you really want to delete the recording?"), default = False) def callbackRemoveRecording(self, answer): if not answer: return self.delete() def 
removeTimer(self, result): if not result: return list = self["timerlist"] cur = list.getCurrent() if cur: timer = cur timer.afterEvent = AFTEREVENT.NONE self.session.nav.RecordTimer.removeEntry(timer) self.refill() self.updateState() def MoveToTrash(self, trashpath): self.removeTimer(True) moviepath = os.path.normpath(resolveFilename(SCOPE_HDD)) movedList =[] files = os.listdir(moviepath) for file in files: if file.startswith(self.moviename): movedList.append((os.path.join(moviepath, file), os.path.join(trashpath, file))) Tools.CopyFiles.moveFiles(movedList, None) def delete(self): item = self["timerlist"].getCurrent() if item is None: return # huh? name = item.name service = str(item.service_ref.getServiceName()) t = localtime(item.begin) f = str(t.tm_year) + str(t.tm_mon).zfill(2) + str(t.tm_mday).zfill(2) + " " + str(t.tm_hour).zfill(2) + str(t.tm_min).zfill(2) + " - " + service + " - " + name f = f.replace(':','_') f = f.replace(',','_') f = f.replace('/','_') path = resolveFilename(SCOPE_HDD) self.removeTimer(True) from enigma import eBackgroundFileEraser files = os.listdir(path) for file in files: if file.startswith(f): eBackgroundFileEraser.getInstance().erase(os.path.realpath(path + file)) def refill(self): oldsize = len(self.list) self.fillTimerList() lst = self["timerlist"] newsize = len(self.list) if oldsize and oldsize != newsize: idx = lst.getCurrentIndex() lst.entryRemoved(idx) else: lst.invalidate() def addCurrentTimer(self): event = None service = self.session.nav.getCurrentService() if service is not None: info = service.info() if info is not None: event = info.getEvent(0) # FIXME only works if already playing a service serviceref = ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup()) if event is None: data = (int(time()), int(time() + 60), "", "", None) else: data = parseEvent(event, description = False) self.addTimer(RecordTimerEntry(serviceref, checkOldTimers = True, dirname = preferredTimerPath(), *data)) def addTimer(self, timer): self.session.openWithCallback(self.finishedAdd, TimerEntry, timer) def finishedEdit(self, answer): # print "finished edit" if answer[0]: # print "Edited timer" entry = answer[1] timersanitycheck = TimerSanityCheck(self.session.nav.RecordTimer.timer_list, entry) success = False if not timersanitycheck.check(): simulTimerList = timersanitycheck.getSimulTimerList() if simulTimerList is not None: for x in simulTimerList: if x.setAutoincreaseEnd(entry): self.session.nav.RecordTimer.timeChanged(x) if not timersanitycheck.check(): simulTimerList = timersanitycheck.getSimulTimerList() if simulTimerList is not None: self.session.openWithCallback(self.finishedEdit, TimerSanityConflict, timersanitycheck.getSimulTimerList()) else: success = True else: success = True if success: print "Sanity check passed" self.session.nav.RecordTimer.timeChanged(entry) self.fillTimerList() self.updateState() # else: # print "Timeredit aborted" def finishedAdd(self, answer): # print "finished add" if answer[0]: entry = answer[1] simulTimerList = self.session.nav.RecordTimer.record(entry) if simulTimerList is not None: for x in simulTimerList: if x.setAutoincreaseEnd(entry): self.session.nav.RecordTimer.timeChanged(x) simulTimerList = self.session.nav.RecordTimer.record(entry) if simulTimerList is not None: self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList) self.fillTimerList() self.updateState() # else: # print "Timeredit aborted" def finishSanityCorrection(self, answer): self.finishedAdd(answer) def 
leave(self): self.session.nav.RecordTimer.on_state_change.remove(self.onStateChange) self.close() def onStateChange(self, entry): self.refill() self.updateState() class TimerSanityConflict(Screen): EMPTY = 0 ENABLE = 1 DISABLE = 2 EDIT = 3 def __init__(self, session, timer): Screen.__init__(self, session) self.timer = timer print "TimerSanityConflict" self["timer1"] = TimerList(self.getTimerList(timer[0])) self.list = [] self.list2 = [] count = 0 for x in timer: if count != 0: self.list.append((_("Conflicting timer") + " " + str(count), x)) self.list2.append((timer[count], False)) count += 1 if count == 1: self.list.append((_("Channel not in services list"))) self["list"] = MenuList(self.list) self["timer2"] = TimerList(self.list2) self["key_red"] = Button("Edit") self["key_green"] = Button(" ") self["key_yellow"] = Button(" ") self["key_blue"] = Button(" ") self.key_green_choice = self.EMPTY self.key_yellow_choice = self.EMPTY self.key_blue_choice = self.EMPTY self["actions"] = ActionMap(["OkCancelActions", "DirectionActions", "ShortcutActions", "TimerEditActions"], { "ok": self.leave_ok, "cancel": self.leave_cancel, "red": self.editTimer1, "up": self.up, "down": self.down }, -1) self.setTitle(_("Timer sanity error")) self.onShown.append(self.updateState) def getTimerList(self, timer): return [(timer, False)] def editTimer1(self): self.session.openWithCallback(self.finishedEdit, TimerEntry, self["timer1"].getCurrent()) def editTimer2(self): self.session.openWithCallback(self.finishedEdit, TimerEntry, self["timer2"].getCurrent()) def toggleNewTimer(self): if self.timer[0].disabled: self.timer[0].disabled = False self.session.nav.RecordTimer.timeChanged(self.timer[0]) elif not self.timer[0].isRunning(): self.timer[0].disabled = True self.session.nav.RecordTimer.timeChanged(self.timer[0]) self.finishedEdit((True, self.timer[0])) def toggleTimer(self): x = self["list"].getSelectedIndex() + 1 # the first is the new timer so we do +1 here if self.timer[x].disabled: self.timer[x].disabled = False self.session.nav.RecordTimer.timeChanged(self.timer[x]) if not self.timer[0].isRunning(): self.timer[0].disabled = True self.session.nav.RecordTimer.timeChanged(self.timer[0]) elif not self.timer[x].isRunning(): self.timer[x].disabled = True self.session.nav.RecordTimer.timeChanged(self.timer[x]) if self.timer[x].disabled: self.timer[0].disabled = False self.session.nav.RecordTimer.timeChanged(self.timer[0]) self.finishedEdit((True, self.timer[0])) def finishedEdit(self, answer): self.leave_ok() def leave_ok(self): self.close((True, self.timer[0])) def leave_cancel(self): self.close((False, self.timer[0])) def up(self): self["list"].instance.moveSelection(self["list"].instance.moveUp) self["timer2"].moveToIndex(self["list"].getSelectedIndex()) def down(self): self["list"].instance.moveSelection(self["list"].instance.moveDown) self["timer2"].moveToIndex(self["list"].getSelectedIndex()) def removeAction(self, descr): actions = self["actions"].actions if descr in actions: del actions[descr] def updateState(self): if self.timer[0] is not None: if self.timer[0].disabled and self.key_green_choice != self.ENABLE: self["actions"].actions.update({"green":self.toggleTimer}) self["key_green"].setText(_("Enable")) self.key_green_choice = self.ENABLE elif self.timer[0].isRunning() and not self.timer[0].repeated and self.key_green_choice != self.EMPTY: self.removeAction("green") self["key_green"].setText(" ") self.key_green_choice = self.EMPTY elif (not self.timer[0].isRunning() or self.timer[0].repeated ) and 
self.key_green_choice != self.DISABLE: self["actions"].actions.update({"green":self.toggleNewTimer}) self["key_green"].setText(_("Disable")) self.key_green_choice = self.DISABLE if len(self.timer) > 1: x = self["list"].getSelectedIndex() + 1 # the first is the new timer so we do +1 here if self.timer[x] is not None: if self.key_yellow_choice == self.EMPTY: self["actions"].actions.update({"yellow":self.editTimer2}) self["key_yellow"].setText(_("Edit")) self.key_yellow_choice = self.EDIT if self.timer[x].disabled and self.key_blue_choice != self.ENABLE: self["actions"].actions.update({"blue":self.toggleTimer}) self["key_blue"].setText(_("Enable")) self.key_blue_choice = self.ENABLE elif self.timer[x].isRunning() and not self.timer[x].repeated and self.key_blue_choice != self.EMPTY: self.removeAction("blue") self["key_blue"].setText(" ") self.key_blue_choice = self.EMPTY elif (not self.timer[x].isRunning() or self.timer[x].repeated ) and self.key_blue_choice != self.DISABLE: self["actions"].actions.update({"blue":self.toggleTimer}) self["key_blue"].setText(_("Disable")) self.key_blue_choice = self.DISABLE else: #FIXME.... this doesnt hide the buttons self.... just the text if self.key_yellow_choice != self.EMPTY: self.removeAction("yellow") self["key_yellow"].setText(" ") self.key_yellow_choice = self.EMPTY if self.key_blue_choice != self.EMPTY: self.removeAction("blue") self["key_blue"].setText(" ") self.key_blue_choice = self.EMPTY class TimerEditListSummary(Screen): def __init__(self, session, parent): Screen.__init__(self, session, parent = parent) self["name"] = StaticText("") self["service"] = StaticText("") self["time"] = StaticText("") self["duration"] = StaticText("") self["state"] = StaticText("") self.onShow.append(self.addWatcher) self.onHide.append(self.removeWatcher) def addWatcher(self): self.parent.onChangedEntry.append(self.selectionChanged) self.parent.updateState() def removeWatcher(self): self.parent.onChangedEntry.remove(self.selectionChanged) def selectionChanged(self, name, time, duration, service, state): self["name"].text = name self["service"].text = service self["time"].text = time self["duration"].text = duration self["state"].text = state
gpl-2.0
jianlirong/incubator-hawq
pxf/src/scripts/pxf_manual_failover.py
12
5127
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# pxf_manual_failover.py
# This python script will adapt the PXF external tables to the new NameNode in case
# of High Availability manual failover.
# The script receives as input the new namenode host and then goes over each external
# table entry in the catalog table pg_exttable and updates the LOCATION field -
# replaces the old Namenode host with the new one.

import sys
from gppylib.db import dbconn

def wrongUsage():
    'Print usage string and leave'
    print "usage: pxf_manual_failover <new_namenode_host> <database> [-h <hawq_master_host>] [-p <hawq_master_port>]"
    exit()

def getNewHost():
    'reads new NameNode from command line - exits if wrong input'
    if len(sys.argv) < 2:
        wrongUsage()
    return sys.argv[1]

def getDatabase():
    'reads database from command line - exits if wrong input'
    if len(sys.argv) < 3:
        wrongUsage()
    return sys.argv[2]

def getOptionalInput(flag, default):
    """generic function - retrieves optional parameters from the input.
    If [flag <value>] is not on the command line, we use default.

    Explaining the parsing. This is how the command line string that
    sys.argv returns, looks like:
    ['./pxf_manual_failover.py', 'localhost', 'films',
     '-h', 'isenshackamac.corp.emc.com', '-p', '5432']
    """
    input = list(sys.argv)
    if input.count(flag) == 0:
        return default
    flag_idx = input.index(flag)
    if len(input) < flag_idx + 2:  # the flag must be followed by a value
        wrongUsage()
    return input[flag_idx + 1]

def getMasterHost():
    'reads hawq_master_host from command line - optional'
    return getOptionalInput("-h", "localhost")

def getMasterPort():
    'reads hawq_master_port from command line - optional'
    return getOptionalInput("-p", 5432)

def isPxfTable(location):
    'decide if this is a PXF table by analyzing the LOCATION field for the table entry in pg_exttable'
    return cmp(location[1:7],"pxf://") == 0

def makeNewLocation(new_host, location):
    'replaces [host] substring in [location] with [new_host]'
    start = location.find("//")
    end = location.find(":", start)
    size = len(location)
    new_location = location[:start] + "//" + new_host + location[end:size]
    return new_location

def promptUser(new_host, database, hawq_master_host, hawq_master_port):
    'Give the user a last chance to change their mind'
    print "Will replace the current Namenode hostname with [" + new_host + "] in database [" + database + "]"
    print "Hawq master is: [" + hawq_master_host + "] and Hawq port is: [" + str(hawq_master_port) + "]"
    reply = raw_input('Do you wish to continue: Yes[Y]/No[N] ?')
    reply = reply.lower()
    if not(cmp(reply, 'yes') == 0 or cmp(reply, 'y') == 0):
        print "User decided to cancel operation. Leaving..."
exit() def connectToDb(hawq_master_host, hawq_master_port, database): 'connect to database' url = dbconn.DbURL(hawq_master_host ,port = hawq_master_port ,dbname = database ) return dbconn.connect(dburl = url) def updateOneRecord(conn, new_host, row): 'Updates the LOCATION field of one record' if not(isPxfTable(row[0])): return new_location = makeNewLocation(new_host, row[0]) dbconn.execSQL(conn, "UPDATE pg_exttable SET location = '" + new_location + "' WHERE reloid = " + str(row[1])) print "Updated LOCATION for table ", row[2], "oid: ", row[1], \ "\n Old LOCATION: ", row[0], "\n New LOCATION: ", new_location def updateNnHost(conn, new_host): 'update the LOCATION field for each record in pg_exttable' dbconn.execSQL(conn, "set allow_system_table_mods = 'DML'") dbconn.execSQL(conn, "START TRANSACTION") cursor = dbconn.execSQL(conn, "SELECT location, reloid, relname FROM pg_exttable, pg_class WHERE reloid = relfilenode") for row in cursor: updateOneRecord(conn, new_host, row) conn.commit() def main(): 'The driver function of this module' new_host = getNewHost() database = getDatabase() hawq_master_host = getMasterHost() hawq_master_port = getMasterPort() promptUser(new_host, database, hawq_master_host, hawq_master_port) conn = connectToDb(hawq_master_host, hawq_master_port, database) updateNnHost(conn, new_host) conn.close() if __name__ == "__main__": main()
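
# Worked example for makeNewLocation (illustrative; the host names and the
# location string are made up, shaped like a pg_exttable LOCATION value):
#
#   makeNewLocation('nn2', '{pxf://nn1:51200/sales?PROFILE=HdfsTextSimple}')
#   # -> '{pxf://nn2:51200/sales?PROFILE=HdfsTextSimple}'
#
# Only the host between '//' and the port colon is replaced.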
apache-2.0
Mirdrack/4chanscrapper
lib/python2.7/site-packages/requests/structures.py
1160
2977
# -*- coding: utf-8 -*- """ requests.structures ~~~~~~~~~~~~~~~~~~~ Data structures that power Requests. """ import collections class CaseInsensitiveDict(collections.MutableMapping): """ A case-insensitive ``dict``-like object. Implements all methods and operations of ``collections.MutableMapping`` as well as dict's ``copy``. Also provides ``lower_items``. All keys are expected to be strings. The structure remembers the case of the last key to be set, and ``iter(instance)``, ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` will contain case-sensitive keys. However, querying and contains testing is case insensitive:: cid = CaseInsensitiveDict() cid['Accept'] = 'application/json' cid['aCCEPT'] == 'application/json' # True list(cid) == ['Accept'] # True For example, ``headers['content-encoding']`` will return the value of a ``'Content-Encoding'`` response header, regardless of how the header name was originally stored. If the constructor, ``.update``, or equality comparison operations are given keys that have equal ``.lower()``s, the behavior is undefined. """ def __init__(self, data=None, **kwargs): self._store = dict() if data is None: data = {} self.update(data, **kwargs) def __setitem__(self, key, value): # Use the lowercased key for lookups, but store the actual # key alongside the value. self._store[key.lower()] = (key, value) def __getitem__(self, key): return self._store[key.lower()][1] def __delitem__(self, key): del self._store[key.lower()] def __iter__(self): return (casedkey for casedkey, mappedvalue in self._store.values()) def __len__(self): return len(self._store) def lower_items(self): """Like iteritems(), but with all lowercase keys.""" return ( (lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items() ) def __eq__(self, other): if isinstance(other, collections.Mapping): other = CaseInsensitiveDict(other) else: return NotImplemented # Compare insensitively return dict(self.lower_items()) == dict(other.lower_items()) # Copy is required def copy(self): return CaseInsensitiveDict(self._store.values()) def __repr__(self): return str(dict(self.items())) class LookupDict(dict): """Dictionary lookup object.""" def __init__(self, name=None): self.name = name super(LookupDict, self).__init__() def __repr__(self): return '<lookup \'%s\'>' % (self.name) def __getitem__(self, key): # We allow fall-through here, so values default to None return self.__dict__.get(key, None) def get(self, key, default=None): return self.__dict__.get(key, default)
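
# Brief illustration of LookupDict's attribute-backed lookup (a sketch, not
# part of the original module; the names below are arbitrary):
#
#   codes = LookupDict(name='status')
#   codes.ok = 200
#   codes['ok']              # -> 200
#   codes['missing']         # -> None (falls through instead of raising KeyError)
#   codes.get('missing', -1) # -> -1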
mit
libcrosswind/libcrosswind
platform/windows/compilers/x64/TDM-GCC-64/gdb64/bin/lib/wsgiref/headers.py
229
5879
"""Manage HTTP Response Headers Much of this module is red-handedly pilfered from email.message in the stdlib, so portions are Copyright (C) 2001,2002 Python Software Foundation, and were written by Barry Warsaw. """ from types import ListType, TupleType # Regular expression that matches `special' characters in parameters, the # existence of which force quoting of the parameter value. import re tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]') def _formatparam(param, value=None, quote=1): """Convenience function to format and return a key=value pair. This will quote the value if needed or if quote is true. """ if value is not None and len(value) > 0: if quote or tspecials.search(value): value = value.replace('\\', '\\\\').replace('"', r'\"') return '%s="%s"' % (param, value) else: return '%s=%s' % (param, value) else: return param class Headers: """Manage a collection of HTTP response headers""" def __init__(self,headers): if type(headers) is not ListType: raise TypeError("Headers must be a list of name/value tuples") self._headers = headers def __len__(self): """Return the total number of headers, including duplicates.""" return len(self._headers) def __setitem__(self, name, val): """Set the value of a header.""" del self[name] self._headers.append((name, val)) def __delitem__(self,name): """Delete all occurrences of a header, if present. Does *not* raise an exception if the header is missing. """ name = name.lower() self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name] def __getitem__(self,name): """Get the first header value for 'name' Return None if the header is missing instead of raising an exception. Note that if the header appeared multiple times, the first exactly which occurrance gets returned is undefined. Use getall() to get all the values matching a header field name. """ return self.get(name) def has_key(self, name): """Return true if the message contains the header.""" return self.get(name) is not None __contains__ = has_key def get_all(self, name): """Return a list of all the values for the named field. These will be sorted in the order they appeared in the original header list or were added to this instance, and may contain duplicates. Any fields deleted and re-inserted are always appended to the header list. If no fields exist with the given name, returns an empty list. """ name = name.lower() return [kv[1] for kv in self._headers if kv[0].lower()==name] def get(self,name,default=None): """Get the first header value for 'name', or return 'default'""" name = name.lower() for k,v in self._headers: if k.lower()==name: return v return default def keys(self): """Return a list of all the header field names. These will be sorted in the order they appeared in the original header list, or were added to this instance, and may contain duplicates. Any fields deleted and re-inserted are always appended to the header list. """ return [k for k, v in self._headers] def values(self): """Return a list of all header values. These will be sorted in the order they appeared in the original header list, or were added to this instance, and may contain duplicates. Any fields deleted and re-inserted are always appended to the header list. """ return [v for k, v in self._headers] def items(self): """Get all the header fields and values. These will be sorted in the order they were in the original header list, or were added to this instance, and may contain duplicates. Any fields deleted and re-inserted are always appended to the header list. 
""" return self._headers[:] def __repr__(self): return "Headers(%r)" % self._headers def __str__(self): """str() returns the formatted headers, complete with end line, suitable for direct HTTP transmission.""" return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['','']) def setdefault(self,name,value): """Return first matching header value for 'name', or 'value' If there is no header named 'name', add a new header with name 'name' and value 'value'.""" result = self.get(name) if result is None: self._headers.append((name,value)) return value else: return result def add_header(self, _name, _value, **_params): """Extended header setting. _name is the header field to add. keyword arguments can be used to set additional parameters for the header field, with underscores converted to dashes. Normally the parameter will be added as key="value" unless value is None, in which case only the key will be added. Example: h.add_header('content-disposition', 'attachment', filename='bud.gif') Note that unlike the corresponding 'email.message' method, this does *not* handle '(charset, language, value)' tuples: all values must be strings or None. """ parts = [] if _value is not None: parts.append(_value) for k, v in _params.items(): if v is None: parts.append(k.replace('_', '-')) else: parts.append(_formatparam(k.replace('_', '-'), v)) self._headers.append((_name, "; ".join(parts)))
gpl-3.0
prark/bitcoinxt
qa/rpc-tests/test_framework/blocktools.py
93
2057
# blocktools.py - utilities for manipulating blocks and transactions
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#

from mininode import *
from script import CScript, CScriptOp

# Create a block (with regtest difficulty)
def create_block(hashprev, coinbase, nTime=None):
    block = CBlock()
    if nTime is None:
        import time
        block.nTime = int(time.time()+600)
    else:
        block.nTime = nTime
    block.hashPrevBlock = hashprev
    block.nBits = 0x207fffff # Will break after a difficulty adjustment...
    block.vtx.append(coinbase)
    block.hashMerkleRoot = block.calc_merkle_root()
    block.calc_sha256()
    return block

def serialize_script_num(value):
    r = bytearray(0)
    if value == 0:
        return r
    neg = value < 0
    absvalue = -value if neg else value
    while (absvalue):
        r.append(chr(absvalue & 0xff))
        absvalue >>= 8
    if r[-1] & 0x80:
        r.append(0x80 if neg else 0)
    elif neg:
        r[-1] |= 0x80
    return r

counter=1
# Create an anyone-can-spend coinbase transaction, assuming no miner fees
def create_coinbase(heightAdjust = 0):
    global counter
    coinbase = CTransaction()
    coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
                ser_string(serialize_script_num(counter+heightAdjust)), 0xffffffff))
    counter += 1
    coinbaseoutput = CTxOut()
    coinbaseoutput.nValue = 50*100000000
    halvings = int((counter+heightAdjust)/150) # regtest
    coinbaseoutput.nValue >>= halvings
    coinbaseoutput.scriptPubKey = ""
    coinbase.vout = [ coinbaseoutput ]
    coinbase.calc_sha256()
    return coinbase

# Create a transaction with an anyone-can-spend output, that spends the
# nth output of prevtx.
def create_transaction(prevtx, n, sig, value):
    tx = CTransaction()
    assert(n < len(prevtx.vout))
    tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
    tx.vout.append(CTxOut(value, ""))
    tx.calc_sha256()
    return tx
mit
polaris-gslb/polaris-core
tests/test-polaris-pdns.py
2
1937
#!/usr/bin/env python3

import subprocess
import sys
import time
import json

POLARIS_PDNS_FILE = '/opt/polaris/bin/polaris-pdns'

def pretty_json(s):
    d = json.loads(s)
    return json.dumps(d, indent=4, separators=(',', ': '))

class TestPolarisPDNS:

    def __init__(self, polaris_pdns_file):
        self.proc = subprocess.Popen([ polaris_pdns_file ],
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE)

    def execute_query(self, query):
        query += '\n'
        self.proc.stdin.write(query.encode())
        self.proc.stdin.flush()
        output = self.proc.stdout.readline().decode()
        return pretty_json(output)

    def prepare_query(self, method, params):
        q = {
            'method': method,
            'parameters': {
                'qtype': params['qtype'],
                'qname': params['qname'],
                'remote': params['remote'],
                'local': params['local'],
                'real-remote': params['real-remote'],
                'zone-id': params['zone-id']
            }
        }
        return json.dumps(q)

if __name__ == '__main__':
    t = TestPolarisPDNS(POLARIS_PDNS_FILE)

    method = 'lookup'
    params = {
        'qtype': 'A',
        'qname': 'www.example.com',
        'remote': '10.1.1.21',
        'local': '0.0.0.0',
        'real-remote': '10.1.1.21/32',
        'zone-id': -1
    }
    q = t.prepare_query(method, params)
    print("query: ", pretty_json(q), "\n")
    print("response: ", t.execute_query(q))

    method = 'lookup'
    params = {
        'qtype': 'SOA',
        'qname': 'www.example.com',
        'remote': '10.1.1.21',
        'local': '0.0.0.0',
        'real-remote': '10.1.1.21/32',
        'zone-id': -1
    }
    q = t.prepare_query(method, params)
    print("query: ", pretty_json(q), "\n")
    print("response: ", t.execute_query(q))
bsd-3-clause
daniel20162016/my-first
read_xml_all/calcul_matrix_compare_je_good_192matrix.py
1
6357
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 31 15:45:22 2016

@author: wang
"""
#from matplotlib import pylab as plt
#from numpy import fft, fromstring, int16, linspace
#import wave

from read_wav_xml_good_1 import*
from matrix_24_2 import*
from max_matrix_norm import*
import numpy as np

# open a wave file
filename = 'francois_filon_pure_3.wav'
filename_1 ='francois_filon_pure_3.xml'
word ='je'

wave_signal_float,framerate, word_start_point, word_length_point, word_end_point= read_wav_xml_good_1(filename,filename_1,word)

#print 'word_start_point=',word_start_point
#print 'word_length_point=',word_length_point
#print 'word_end_point=',word_end_point

XJ_1 =wave_signal_float

t_step=1920;
t_entre_step=1440;

t_du_1_1 = int(word_start_point[0]);
t_du_1_2 = int(word_end_point[0]);

t_du_2_1 = int(word_start_point[1]);
t_du_2_2 = int(word_end_point[1]);

t_du_3_1 = int(word_start_point[2]);
t_du_3_2 = int(word_end_point[2]);

t_du_4_1 = int(word_start_point[3]);
t_du_4_2 = int(word_end_point[3]);

t_du_5_1 = int(word_start_point[4]);
t_du_5_2 = int(word_end_point[4]);

fs=framerate

#XJ_du_1 = wave_signal_float[(t_du_1_1-1):t_du_1_2];
#length_XJ_du_1 = int(word_length_point[0]+1);
#x1,y1,z1=matrix_24_2(XJ_du_1,fs)
#x1=max_matrix_norm(x1)

#==============================================================================
# this part is to calcul the first matrix
#==============================================================================
XJ_du_1_2 = XJ_1[(t_du_1_1-1):(t_du_1_1+t_step)];
x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
x1_1=max_matrix_norm(x1_1)
matrix_all_step_new_1 = np.zeros([192])
for i in range(0,24):
    matrix_all_step_new_1[i]=x1_1[i]
#==============================================================================
# the other colonne is the all fft
#==============================================================================
for i in range(1,8):
    XJ_du_1_total = XJ_1[(t_du_1_1+t_entre_step*(i)-1):(t_du_1_1+t_step+t_entre_step*(i) )];
    x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
    x1_all=max_matrix_norm(x1_all)
    for j in range(0,24):
        matrix_all_step_new_1[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the second matrix
#==============================================================================
for k in range (1,2):
    t_start=t_du_2_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_2 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_2[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_2[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 3 matrix
#==============================================================================
for k in range (1,2):
    t_start=t_du_3_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_3 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_3[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_3[24*i+j]=x1_all[j]
#==============================================================================
# this part is to calcul the 4 matrix
#==============================================================================
for k in range (1,2):
    t_start=t_du_4_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_4 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_4[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
#        print i
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_4[24*i+j]=x1_all[j]
#print 'matrix_all_step_4=',matrix_all_step_4
#==============================================================================
# this part is to calcul the 5 matrix
#==============================================================================
for k in range (1,2):
    t_start=t_du_5_1
    XJ_du_1_2 = XJ_1[(t_start-1):(t_start+t_step)];
    x1_1,y1_1,z1_1=matrix_24_2(XJ_du_1_2 ,fs)
    x1_1=max_matrix_norm(x1_1)
    matrix_all_step_new_5 = np.zeros([192])
    for i in range(0,24):
        matrix_all_step_new_5[i]=x1_1[i]
    #==========================================================================
    # the other colonne is the all fft
    #==========================================================================
    for i in range(1,8):
#        print i
        XJ_du_1_total = XJ_1[(t_start+t_entre_step*(i)-1):(t_start+t_step+t_entre_step*(i) )];
        x1_all,y1_all,z1_all=matrix_24_2(XJ_du_1_total,fs)
        x1_all=max_matrix_norm(x1_all)
        for j in range(0,24):
            matrix_all_step_new_5[24*i+j]=x1_all[j]
#print 'matrix_all_step_5=',matrix_all_step_5

np.savez('je_compare_192_matrix.npz',matrix_all_step_new_1,matrix_all_step_new_2,matrix_all_step_new_3,matrix_all_step_new_4,matrix_all_step_new_5)
mit
wholland/env
env.py
1
6119
#!/usr/bin/python

import argparse
import json
import shutil
import os

def copy_file(src, dest, backup):
    success = True
    if not backup is None:
        (backup_folder, backup_file) = os.path.split(backup)
        print("Creating backup file for " + dest + " at " + backup)
        try:
            if not os.path.exists(backup_folder):
                os.makedirs(backup_folder)
            shutil.copyfile(dest, backup)
        except Exception as e:
            print("Backup failed: " + str(e))
            success = False
    if success:
        (dest_folder, dest_file) = os.path.split(dest)
        print("Copy file " + src + " to " + dest)
        try:
            if not os.path.exists(dest_folder):
                os.makedirs(dest_folder)
            shutil.copyfile(src, dest)
        except IOError as e:
            print("Copy failed: " + str(e))

def copy_dir(src, dest, backup):
    success = True
    if not backup is None:
        try:
            print("Creating backup file for " + dest + " at " + backup)
            rmtree(backup, ignore_errors=True)
            shutil.copytree(dest, backup)
        except IOError as e:
            print("Backup failed: " + str(e))
            success = False
    if success:
        try:
            print("Copy directory " + src + " to " + dest)
            shutil.copytree(src, dest)
        except IOError as e:
            print("Copy failed: " + str(e))

def push(args):
    defs = json.load(open(os.path.expanduser(args.file)))
    for definition in defs:
        if definition["group"] in args.categories:
            print("Pushing " + definition["name"]);
            src = os.path.expanduser(os.path.join(args.source, definition["source"]))
            dest = os.path.expanduser(os.path.join(args.target, definition["target"]))
            backup = os.path.expanduser(os.path.join(args.backup, definition["target"]))
            if definition["type"].lower() == "f":
                # Copy a file
                if args.unsafe:
                    if not args.wimp:
                        copy_file(src, dest, None)
                    else:
                        print("Would copy file. Src:" + src + " Dest:" + dest);
                else:
                    if not args.wimp:
                        copy_file(src, dest, backup)
                    else:
                        print("Would copy file. Src:" + src + " Dest:" + dest + " Backup:" + backup);
            elif definition["type"].lower() == "d":
                # Copy a directory
                if args.verbose:
                    print(definition["name"] + ": Pushing directory from " + src + " to " + dest)
                if args.unsafe:
                    if not args.wimp:
                        copy_dir(src, dest, None)
                    else:
                        print("Would copy file. Src:" + src + " Dest:" + dest);
                else:
                    if not args.wimp:
                        copy_dir(src, dest, backup)
                    else:
                        print("Would copy dir. Src:" + src + " Dest:" + dest + " Backup:" + backup);
            else:
                print(definition["name"] + ": Unknown type \""+definition["type"]+"\"")

def pull(args):
    defs = json.load(open(os.path.expanduser(args.file)))
    for definition in defs:
        if definition["group"] in args.categories:
            print("Pulling " + definition["name"]);
            src = os.path.expanduser(os.path.join(args.target, definition["target"]))
            dest = os.path.expanduser(os.path.join(args.source, definition["source"]))
            if definition["type"].lower() == "f":
                # Copy a file
                if not args.wimp:
                    copy_file(src, dest, None)
                else:
                    print("Would copy file. Src:" + src + " Dest:" + dest);
            elif definition["type"].lower() == "d":
                # Copy a directory
                if not args.wimp:
                    copy_dir(src, dest, None)
                else:
                    print("Would copy directory. Src:" + src + " Dest:" + dest);
            else:
                print(definition["name"] + ": Unknown type \""+definition["type"]+"\"")

def revert(args):
    defs = json.load(open(os.path.expanduser(args.file)))
    for definition in defs:
        if definition["group"] in args.categories:
            src = os.path.expanduser(os.path.join(args.backup, definition["target"]))
            dest = os.path.expanduser(os.path.join(args.target, definition["target"]))
            if definition["type"].lower() == "f":
                # Copy a file
                if not args.wimp:
                    copy_file(src, dest, None)
                else:
                    print("Would copy file. Src:" + src + " Dest:" + dest);
            elif definition["type"].lower() == "d":
                # Copy a directory
                if not args.wimp:
                    copy_dir(src, dest, None)
                else:
                    print("Would copy directory. Src:" + src + " Dest:" + dest);
            else:
                print(definition["name"] + ": Unknown type \""+definition["type"]+"\"")

def main():
    default_defs = "~/env/env.def"
    default_source = "~/env/"
    default_target = "~/"
    default_backup = "~/.backup/env/"

    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--verbose", action="store_true", help="Increase Verbosity")
    parser.add_argument("-f", "--file", default=default_defs, help="Definition File to use")
    parser.add_argument("-s", "--source", default=default_source, help="Override source root")
    parser.add_argument("-t", "--target", default=default_target, help="Override target root")
    parser.add_argument("-w", "--wimp", action="store_true", help="Don't actually make any changes (implies -v)")

    subparsers = parser.add_subparsers()

    parser_push = subparsers.add_parser("push", help="Push configs into environment")
    parser_push.add_argument("-u", "--unsafe", action="store_true", help="No backups Created")
    parser_push.add_argument("-a", "--All", action="store_true", help="Cleanup Backups")
    parser_push.add_argument("-b", "--backup", default=default_backup, help="Override backup root")
    parser_push.add_argument("categories", nargs=argparse.REMAINDER)
    parser_push.set_defaults(func=push)

    parser_pull = subparsers.add_parser("pull", help="Pull configs from environment")
    parser_pull.add_argument("-a", "--All", action="store_true", help="Cleanup Backups")
    parser_pull.add_argument("categories", nargs=argparse.REMAINDER)
    parser_pull.set_defaults(func=pull)

    parser_revert = subparsers.add_parser("revert", help="Revert configs from backups")
    parser_revert.add_argument("-c", "--cleanup", action="store_true", help="Cleanup Backups")
    parser_revert.add_argument("-a", "--All", action="store_true", help="Cleanup Backups")
    parser_revert.add_argument("-b", "--backup", default=default_backup, help="Override backup root")
    parser_revert.add_argument("categories", nargs=argparse.REMAINDER)
    parser_revert.set_defaults(func=revert)

    args = parser.parse_args()
    if args.wimp:
        args.verbose = True
    args.func(args)

if __name__ == "__main__":
    main();
mit
jailuthra/misc
python/quicksort.py
1
1066
import sys
import random

comparisons = 0

def main():
    global comparisons
    with open(sys.argv[1], 'r') as f:
        arr = [int(x) for x in f.read().split()]
    quicksort(arr, 0, len(arr)-1)
    # print(arr)
    print(comparisons)

def getPivot(arr, l, r):
    first = arr[l]
    mid = arr[(l+r)//2]
    last = arr[r]
    if first <= mid <= last or last <= mid <= first:
        return (l+r)//2
    elif mid <= first <= last or last <= first <= mid:
        return l
    else:
        return r

def partition(arr, l, r):
    k = getPivot(arr, l, r)
    k = random.randint(l, r)
    pivot = arr[k]
    arr[k], arr[l] = arr[l], arr[k]
    i = l+1
    for j in range(l+1, r+1):
        if arr[j] < pivot:
            arr[j], arr[i] = arr[i], arr[j]
            i += 1
    arr[l], arr[i-1] = arr[i-1], arr[l]
    return i-1

def quicksort(arr, l, r):
    if r - l < 0:
        return
    global comparisons
    comparisons += r - l
    p = partition(arr, l, r)
    quicksort(arr, l, p-1)
    quicksort(arr, p+1, r)

if __name__ == '__main__':
    main()
mit
kosz85/django
django/conf/locale/nn/formats.py
65
1743
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
    '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y',  # '2006-10-25', '25.10.2006', '25.10.06'
    # '%d. %b %Y', '%d %b %Y',           # '25. okt 2006', '25 okt 2006'
    # '%d. %b. %Y', '%d %b. %Y',         # '25. okt. 2006', '25 okt. 2006'
    # '%d. %B %Y', '%d %B %Y',           # '25. oktober 2006', '25 oktober 2006'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%Y-%m-%d',              # '2006-10-25'
    '%d.%m.%Y %H:%M:%S',     # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',  # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',        # '25.10.2006 14:30'
    '%d.%m.%Y',              # '25.10.2006'
    '%d.%m.%y %H:%M:%S',     # '25.10.06 14:30:59'
    '%d.%m.%y %H:%M:%S.%f',  # '25.10.06 14:30:59.000200'
    '%d.%m.%y %H:%M',        # '25.10.06 14:30'
    '%d.%m.%y',              # '25.10.06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0'  # non-breaking space
NUMBER_GROUPING = 3
bsd-3-clause
rubenvereecken/pokemongo-api
POGOProtos/Data/Battle/BattleParticipant_pb2.py
16
4760
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: POGOProtos/Data/Battle/BattleParticipant.proto

import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from POGOProtos.Data.Battle import BattlePokemonInfo_pb2 as POGOProtos_dot_Data_dot_Battle_dot_BattlePokemonInfo__pb2
from POGOProtos.Data.Player import PlayerPublicProfile_pb2 as POGOProtos_dot_Data_dot_Player_dot_PlayerPublicProfile__pb2


DESCRIPTOR = _descriptor.FileDescriptor(
  name='POGOProtos/Data/Battle/BattleParticipant.proto',
  package='POGOProtos.Data.Battle',
  syntax='proto3',
  serialized_pb=_b('\n.POGOProtos/Data/Battle/BattleParticipant.proto\x12\x16POGOProtos.Data.Battle\x1a.POGOProtos/Data/Battle/BattlePokemonInfo.proto\x1a\x30POGOProtos/Data/Player/PlayerPublicProfile.proto\"\xac\x02\n\x11\x42\x61ttleParticipant\x12\x41\n\x0e\x61\x63tive_pokemon\x18\x01 \x01(\x0b\x32).POGOProtos.Data.Battle.BattlePokemonInfo\x12K\n\x16trainer_public_profile\x18\x02 \x01(\x0b\x32+.POGOProtos.Data.Player.PlayerPublicProfile\x12\x42\n\x0freverse_pokemon\x18\x03 \x03(\x0b\x32).POGOProtos.Data.Battle.BattlePokemonInfo\x12\x43\n\x10\x64\x65\x66\x65\x61ted_pokemon\x18\x04 \x03(\x0b\x32).POGOProtos.Data.Battle.BattlePokemonInfob\x06proto3')
  ,
  dependencies=[POGOProtos_dot_Data_dot_Battle_dot_BattlePokemonInfo__pb2.DESCRIPTOR,POGOProtos_dot_Data_dot_Player_dot_PlayerPublicProfile__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)


_BATTLEPARTICIPANT = _descriptor.Descriptor(
  name='BattleParticipant',
  full_name='POGOProtos.Data.Battle.BattleParticipant',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='active_pokemon', full_name='POGOProtos.Data.Battle.BattleParticipant.active_pokemon', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='trainer_public_profile', full_name='POGOProtos.Data.Battle.BattleParticipant.trainer_public_profile', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='reverse_pokemon', full_name='POGOProtos.Data.Battle.BattleParticipant.reverse_pokemon', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='defeated_pokemon', full_name='POGOProtos.Data.Battle.BattleParticipant.defeated_pokemon', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=173,
  serialized_end=473,
)

_BATTLEPARTICIPANT.fields_by_name['active_pokemon'].message_type = POGOProtos_dot_Data_dot_Battle_dot_BattlePokemonInfo__pb2._BATTLEPOKEMONINFO
_BATTLEPARTICIPANT.fields_by_name['trainer_public_profile'].message_type = POGOProtos_dot_Data_dot_Player_dot_PlayerPublicProfile__pb2._PLAYERPUBLICPROFILE
_BATTLEPARTICIPANT.fields_by_name['reverse_pokemon'].message_type = POGOProtos_dot_Data_dot_Battle_dot_BattlePokemonInfo__pb2._BATTLEPOKEMONINFO
_BATTLEPARTICIPANT.fields_by_name['defeated_pokemon'].message_type = POGOProtos_dot_Data_dot_Battle_dot_BattlePokemonInfo__pb2._BATTLEPOKEMONINFO
DESCRIPTOR.message_types_by_name['BattleParticipant'] = _BATTLEPARTICIPANT

BattleParticipant = _reflection.GeneratedProtocolMessageType('BattleParticipant', (_message.Message,), dict(
  DESCRIPTOR = _BATTLEPARTICIPANT,
  __module__ = 'POGOProtos.Data.Battle.BattleParticipant_pb2'
  # @@protoc_insertion_point(class_scope:POGOProtos.Data.Battle.BattleParticipant)
  ))
_sym_db.RegisterMessage(BattleParticipant)


# @@protoc_insertion_point(module_scope)
mit
sarahfo/oppia
core/domain/dependency_registry_test.py
29
4131
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for JavaScript library dependencies."""

__author__ = 'Sean Lip'

from core.domain import dependency_registry
from core.domain import exp_services
from core.domain import interaction_registry
from core.tests import test_utils
import feconf


class DependencyRegistryTests(test_utils.GenericTestBase):
    """Tests for the dependency registry."""

    def test_get_dependency_html(self):
        self.assertIn(
            'jsrepl',
            dependency_registry.Registry.get_dependency_html('jsrepl'))

        with self.assertRaises(IOError):
            dependency_registry.Registry.get_dependency_html('a')


class DependencyControllerTests(test_utils.GenericTestBase):
    """Tests for dependency loading on user-facing pages."""

    def test_no_dependencies_in_non_exploration_pages(self):
        response = self.testapp.get(feconf.GALLERY_URL)
        self.assertEqual(response.status_int, 200)
        response.mustcontain(no=['jsrepl'])

        response = self.testapp.get('/about')
        self.assertEqual(response.status_int, 200)
        response.mustcontain(no=['jsrepl'])

    def test_dependencies_loaded_in_exploration_editor(self):
        exp_services.load_demo('0')

        # Register and login as an editor.
        self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
        self.login(self.EDITOR_EMAIL)

        # Verify that the exploration does not have a jsrepl dependency.
        exploration = exp_services.get_exploration_by_id('0')
        interaction_ids = exploration.get_interaction_ids()
        all_dependency_ids = (
            interaction_registry.Registry.get_deduplicated_dependency_ids(
                interaction_ids))

        self.assertNotIn('jsrepl', all_dependency_ids)

        # However, jsrepl is loaded in the exploration editor anyway, since
        # all dependencies are loaded in the exploration editor.
        response = self.testapp.get('/create/0')
        self.assertEqual(response.status_int, 200)
        response.mustcontain('jsrepl')

        self.logout()

    def test_dependency_does_not_load_in_exploration_not_containing_it(self):
        EXP_ID = '0'

        exp_services.load_demo(EXP_ID)

        # Verify that exploration 0 does not have a jsrepl dependency.
        exploration = exp_services.get_exploration_by_id(EXP_ID)
        interaction_ids = exploration.get_interaction_ids()
        all_dependency_ids = (
            interaction_registry.Registry.get_deduplicated_dependency_ids(
                interaction_ids))

        self.assertNotIn('jsrepl', all_dependency_ids)

        # Thus, jsrepl is not loaded in the exploration reader.
        response = self.testapp.get('/explore/%s' % EXP_ID)
        self.assertEqual(response.status_int, 200)
        response.mustcontain(no=['jsrepl'])

    def test_dependency_loads_in_exploration_containing_it(self):
        EXP_ID = '1'

        exp_services.load_demo(EXP_ID)

        # Verify that exploration 1 has a jsrepl dependency.
        exploration = exp_services.get_exploration_by_id(EXP_ID)
        interaction_ids = exploration.get_interaction_ids()
        all_dependency_ids = (
            interaction_registry.Registry.get_deduplicated_dependency_ids(
                interaction_ids))

        self.assertIn('jsrepl', all_dependency_ids)

        # Thus, jsrepl is loaded in the exploration reader.
        response = self.testapp.get('/explore/%s' % EXP_ID)
        self.assertEqual(response.status_int, 200)
        response.mustcontain('jsrepl')
apache-2.0
jobscore/sync-engine
migrations/env.py
3
2894
from __future__ import with_statement
from alembic import context
from logging.config import fileConfig

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(context.config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
from inbox.models.base import MailSyncBase
target_metadata = MailSyncBase.metadata

from inbox.config import config
from inbox.ignition import EngineManager

# Alembic configuration is confusing. Here we look for a shard id both as a
# "main option" (where it's programmatically set by bin/create-db), and in the
# "x" argument, which is the primary facility for passing additional
# command-line args to alembic. So you would do e.g.
#
# alembic -x shard_id=1 upgrade +1
#
# to target shard 1 for the migration.
config_shard_id = context.config.get_main_option('shard_id')
x_shard_id = context.get_x_argument(as_dictionary=True).get(
    'shard_id')
if config_shard_id is not None:
    shard_id = int(config_shard_id)
elif x_shard_id is not None:
    shard_id = int(x_shard_id)
else:
    raise ValueError('No shard_id is configured for migration; '
                     'run `alembic -x shard_id=<target shard id> upgrade +1`')


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well.  By skipping the Engine
    creation we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    engine_manager = EngineManager(config.get_required('DATABASE_HOSTS'),
                                   config.get_required('DATABASE_USERS'),
                                   include_disabled=True)
    engine = engine_manager.engines[shard_id]
    context.configure(engine=engine, url=engine.url)
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    engine_manager = EngineManager(config.get_required('DATABASE_HOSTS'),
                                   config.get_required('DATABASE_USERS'),
                                   include_disabled=True)
    engine = engine_manager.engines[shard_id]
    connection = engine.connect()
    # Set sane lock wait timeout value.
    connection.execute('SET @@lock_wait_timeout=15')
    context.configure(
        connection=connection,
        target_metadata=target_metadata
    )
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
agpl-3.0
nens/threedi-qgis-plugin
tests/test_geo_utils.py
1
1446
""" Test geo utils. """ from qgis.core import QgsCoordinateTransform from ThreeDiToolbox.tests.utilities import ensure_qgis_app_is_initialized from ThreeDiToolbox.utils.geo_utils import get_coord_transformation_instance import pytest @pytest.fixture def rdnew_to_wgs84(): ensure_qgis_app_is_initialized() src_epsg, dest_epsg = 28992, 4326 transformer = get_coord_transformation_instance(src_epsg, dest_epsg) return transformer @pytest.fixture def wgs84_to_rdnew(): ensure_qgis_app_is_initialized() src_epsg, dest_epsg = 4326, 28992 transformer = get_coord_transformation_instance(src_epsg, dest_epsg) return transformer def test_get_coord_transformation_instance(rdnew_to_wgs84, wgs84_to_rdnew): assert isinstance(rdnew_to_wgs84, QgsCoordinateTransform) assert isinstance(wgs84_to_rdnew, QgsCoordinateTransform) def test_get_coord_transformation_epsg(rdnew_to_wgs84): assert rdnew_to_wgs84.sourceCrs().isValid() assert rdnew_to_wgs84.sourceCrs().authid() == "EPSG:28992" assert rdnew_to_wgs84.destinationCrs().isValid() assert rdnew_to_wgs84.destinationCrs().authid() == "EPSG:4326" def test_get_coord_transformation_epsg_reverse(wgs84_to_rdnew): assert wgs84_to_rdnew.sourceCrs().isValid() assert wgs84_to_rdnew.sourceCrs().authid() == "EPSG:4326" assert wgs84_to_rdnew.destinationCrs().isValid() assert wgs84_to_rdnew.destinationCrs().authid() == "EPSG:28992"
gpl-3.0
xfournet/intellij-community
python/lib/Lib/unicodedata.py
69
6437
from bisect import bisect_left
import operator
import java.lang.Character

# XXX - this is intended as a stopgap measure until 2.5.1, which will have a Java implementation
# requires java 6 for `normalize` function

# only has one version of the database

# does not normalized ideographs

_codepoints = {}
_eaw = {}
_names = {}
_segments = []
_eaw_segments = []

Nonesuch = object()

def get_int(col):
    try:
        return int(col)
    except ValueError:
        return None

def get_yn(col):
    if col == 'Y':
        return 1
    else:
        return 0

def get_numeric(col):
    try:
        return float(col)
    except ValueError:
        try:
            a, b = col.split('/')
            return float(a)/float(b)
        except:
            return None

def init_unicodedata(data):
    for row in data:
        cols = row.split(';')
        codepoint = int(cols[0], 16)
        name = cols[1]
        if name == '<CJK Ideograph, Last>':
            lookup_name = 'CJK UNIFIED IDEOGRAPH'
        else:
            lookup_name = name
        data = (
            cols[2], get_int(cols[3]), cols[4], cols[5],
            get_int(cols[6]), get_int(cols[7]), get_numeric(cols[8]),
            get_yn(cols[9]), lookup_name,
        )
        if name.find('First') >= 0:
            start = codepoint
        elif name.find('Last') >= 0:
            _segments.append((start, (start, codepoint), data))
        else:
            _names[name] = unichr(codepoint)
            _codepoints[codepoint] = data

def init_east_asian_width(data):
    for row in data:
        if row.startswith('#'):
            continue
        row = row.partition('#')[0]
        cols = row.split(';')
        if len(cols) < 2:
            continue
        cr = cols[0].split('..')
        width = cols[1].rstrip()
        if len(cr) == 1:
            codepoint = int(cr[0], 16)
            _eaw[codepoint] = width
        else:
            start = int(cr[0], 16)
            end = int(cr[1], 16)
            _eaw_segments.append((start, (start, end), width))

# xxx - need to normalize the segments, so
# <CJK Ideograph, Last> ==> CJK UNIFIED IDEOGRAPH;
# may need to do some sort of analysis against CPython for the normalization!

def name(unichr, default=None):
    codepoint = get_codepoint(unichr, "name")
    v = _codepoints.get(codepoint, None)
    if v is None:
        v = check_segments(codepoint, _segments)
        if v is not None:
            return "%s-%X" % (v[8], codepoint)
    if v is None:
        if default is not Nonesuch:
            return default
        raise ValueError()
    return v[8]

# xxx - also need to add logic here so that if it's CJK UNIFIED
# IDEOGRAPH-8000, we go against the segment to verify the prefix

def lookup(name):
    return _names[name]

def check_segments(codepoint, segments):
    i = bisect_left(segments, (codepoint,))
    if i < len(segments):
        segment = segments[i - 1]
        if codepoint <= segment[1][1]:
            return segment[2]
    return None

def get_codepoint(unichr, fn=None):
    if not(isinstance(unichr, unicode)):
        raise TypeError(fn, "() argument 1 must be unicode, not " + type(unichr))
    if len(unichr) > 1 or len(unichr) == 0:
        raise TypeError("need a single Unicode character as parameter")
    return ord(unichr)

def get_eaw(unichr, default, fn):
    codepoint = get_codepoint(unichr, fn)
    v = _eaw.get(codepoint, None)
    if v is None:
        v = check_segments(codepoint, _eaw_segments)
    if v is None:
        if default is not Nonesuch:
            return default
        raise ValueError()
    return v

def get(unichr, default, fn, getter):
    codepoint = get_codepoint(unichr, fn)
    data = _codepoints.get(codepoint, None)
    if data is None:
        data = check_segments(codepoint, _segments)
    if data is None:
        if default is not Nonesuch:
            return default
        raise ValueError()
    v = getter(data)
    if v is None:
        if default is not Nonesuch:
            return default
        raise ValueError()
    else:
        return v

category_getter = operator.itemgetter(0)
combining_getter = operator.itemgetter(1)
bidirectional_getter = operator.itemgetter(2)
decomposition_getter = operator.itemgetter(3)
decimal_getter = operator.itemgetter(4)
digit_getter = operator.itemgetter(5)
numeric_getter = operator.itemgetter(6)
mirrored_getter = operator.itemgetter(7)

def decimal(unichr, default=Nonesuch):
    return get(unichr, default, 'decimal', decimal_getter)

def decomposition(unichr, default=''):
    return get(unichr, default, 'decomposition', decomposition_getter)

def digit(unichr, default=Nonesuch):
    return get(unichr, default, 'digit', digit_getter)

def numeric(unichr, default=Nonesuch):
    return get(unichr, default, 'numeric', numeric_getter)

def category(unichr):
    return get(unichr, 'Cn', 'catgegory', category_getter)

def bidirectional(unichr):
    return get(unichr, '', 'bidirectional', bidirectional_getter)

def combining(unichr):
    return get(unichr, 0, 'combining', combining_getter)

def mirrored(unichr):
    return get(unichr, 0, 'mirrored', mirrored_getter)

def east_asian_width(unichr):
    return get_eaw(unichr, 'N', 'east_asian_width')

def jymirrored(unichr):
    return java.lang.Character.isMirrored(get_codepoint(unichr, 'mirrored'))

try:
    from java.text import Normalizer

    _forms = {
        'NFC': Normalizer.Form.NFC,
        'NFKC': Normalizer.Form.NFKC,
        'NFD': Normalizer.Form.NFD,
        'NFKD': Normalizer.Form.NFKD
    }

    def normalize(form, unistr):
        """
        Return the normal form 'form' for the Unicode string unistr. Valid
        values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'.
        """
        try:
            normalizer_form = _forms[form]
        except KeyError:
            raise ValueError('invalid normalization form')
        return Normalizer.normalize(unistr, normalizer_form)

except ImportError:
    pass

def init():
    import pkgutil
    import os.path
    import StringIO
    import sys
    my_path = os.path.dirname(__file__)
    loader = pkgutil.get_loader('unicodedata')
    init_unicodedata(StringIO.StringIO(loader.get_data(os.path.join(my_path, 'UnicodeData.txt'))))
    init_east_asian_width(StringIO.StringIO(loader.get_data(os.path.join(my_path, 'EastAsianWidth.txt'))))

init()
apache-2.0
keithroe/vtkoptix
ThirdParty/Twisted/twisted/test/test_ident.py
41
6029
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Test cases for twisted.protocols.ident module.
"""

import struct

from twisted.protocols import ident
from twisted.python import failure
from twisted.internet import error
from twisted.internet import defer
from twisted.trial import unittest
from twisted.test.proto_helpers import StringTransport



class ClassParserTestCase(unittest.TestCase):
    """
    Test parsing of ident responses.
    """

    def setUp(self):
        """
        Create a ident client used in tests.
        """
        self.client = ident.IdentClient()


    def test_indentError(self):
        """
        'UNKNOWN-ERROR' error should map to the L{ident.IdentError} exception.
        """
        d = defer.Deferred()
        self.client.queries.append((d, 123, 456))
        self.client.lineReceived('123, 456 : ERROR : UNKNOWN-ERROR')
        return self.assertFailure(d, ident.IdentError)


    def test_noUSerError(self):
        """
        'NO-USER' error should map to the L{ident.NoUser} exception.
        """
        d = defer.Deferred()
        self.client.queries.append((d, 234, 456))
        self.client.lineReceived('234, 456 : ERROR : NO-USER')
        return self.assertFailure(d, ident.NoUser)


    def test_invalidPortError(self):
        """
        'INVALID-PORT' error should map to the L{ident.InvalidPort} exception.
        """
        d = defer.Deferred()
        self.client.queries.append((d, 345, 567))
        self.client.lineReceived('345, 567 : ERROR : INVALID-PORT')
        return self.assertFailure(d, ident.InvalidPort)


    def test_hiddenUserError(self):
        """
        'HIDDEN-USER' error should map to the L{ident.HiddenUser} exception.
        """
        d = defer.Deferred()
        self.client.queries.append((d, 567, 789))
        self.client.lineReceived('567, 789 : ERROR : HIDDEN-USER')
        return self.assertFailure(d, ident.HiddenUser)


    def test_lostConnection(self):
        """
        A pending query which failed because of a ConnectionLost should
        receive an L{ident.IdentError}.
        """
        d = defer.Deferred()
        self.client.queries.append((d, 765, 432))
        self.client.connectionLost(failure.Failure(error.ConnectionLost()))
        return self.assertFailure(d, ident.IdentError)



class TestIdentServer(ident.IdentServer):
    def lookup(self, serverAddress, clientAddress):
        return self.resultValue


class TestErrorIdentServer(ident.IdentServer):
    def lookup(self, serverAddress, clientAddress):
        raise self.exceptionType()


class NewException(RuntimeError):
    pass


class ServerParserTestCase(unittest.TestCase):
    def testErrors(self):
        p = TestErrorIdentServer()
        p.makeConnection(StringTransport())
        L = []
        p.sendLine = L.append

        p.exceptionType = ident.IdentError
        p.lineReceived('123, 345')
        self.assertEqual(L[0], '123, 345 : ERROR : UNKNOWN-ERROR')

        p.exceptionType = ident.NoUser
        p.lineReceived('432, 210')
        self.assertEqual(L[1], '432, 210 : ERROR : NO-USER')

        p.exceptionType = ident.InvalidPort
        p.lineReceived('987, 654')
        self.assertEqual(L[2], '987, 654 : ERROR : INVALID-PORT')

        p.exceptionType = ident.HiddenUser
        p.lineReceived('756, 827')
        self.assertEqual(L[3], '756, 827 : ERROR : HIDDEN-USER')

        p.exceptionType = NewException
        p.lineReceived('987, 789')
        self.assertEqual(L[4], '987, 789 : ERROR : UNKNOWN-ERROR')
        errs = self.flushLoggedErrors(NewException)
        self.assertEqual(len(errs), 1)

        for port in -1, 0, 65536, 65537:
            del L[:]
            p.lineReceived('%d, 5' % (port,))
            p.lineReceived('5, %d' % (port,))
            self.assertEqual(
                L, ['%d, 5 : ERROR : INVALID-PORT' % (port,),
                    '5, %d : ERROR : INVALID-PORT' % (port,)])

    def testSuccess(self):
        p = TestIdentServer()
        p.makeConnection(StringTransport())
        L = []
        p.sendLine = L.append

        p.resultValue = ('SYS', 'USER')
        p.lineReceived('123, 456')
        self.assertEqual(L[0], '123, 456 : USERID : SYS : USER')


if struct.pack('=L', 1)[0] == '\x01':
    _addr1 = '0100007F'
    _addr2 = '04030201'
else:
    _addr1 = '7F000001'
    _addr2 = '01020304'


class ProcMixinTestCase(unittest.TestCase):
    line = ('4: %s:0019 %s:02FA 0A 00000000:00000000 '
            '00:00000000 00000000 0 0 10927 1 f72a5b80 '
            '3000 0 0 2 -1') % (_addr1, _addr2)

    def testDottedQuadFromHexString(self):
        p = ident.ProcServerMixin()
        self.assertEqual(p.dottedQuadFromHexString(_addr1), '127.0.0.1')

    def testUnpackAddress(self):
        p = ident.ProcServerMixin()
        self.assertEqual(p.unpackAddress(_addr1 + ':0277'),
                         ('127.0.0.1', 631))

    def testLineParser(self):
        p = ident.ProcServerMixin()
        self.assertEqual(
            p.parseLine(self.line),
            (('127.0.0.1', 25), ('1.2.3.4', 762), 0))

    def testExistingAddress(self):
        username = []
        p = ident.ProcServerMixin()
        p.entries = lambda: iter([self.line])
        p.getUsername = lambda uid: (username.append(uid), 'root')[1]
        self.assertEqual(
            p.lookup(('127.0.0.1', 25), ('1.2.3.4', 762)),
            (p.SYSTEM_NAME, 'root'))
        self.assertEqual(username, [0])

    def testNonExistingAddress(self):
        p = ident.ProcServerMixin()
        p.entries = lambda: iter([self.line])
        self.assertRaises(ident.NoUser, p.lookup, ('127.0.0.1', 26),
                          ('1.2.3.4', 762))
        self.assertRaises(ident.NoUser, p.lookup, ('127.0.0.1', 25),
                          ('1.2.3.5', 762))
        self.assertRaises(ident.NoUser, p.lookup, ('127.0.0.1', 25),
                          ('1.2.3.4', 763))
bsd-3-clause
agconti/Shopify-Django
venv/lib/python2.7/site-packages/django/contrib/gis/db/models/sql/compiler.py
93
13247
try:
    from itertools import zip_longest
except ImportError:
    from itertools import izip_longest as zip_longest
from django.utils.six.moves import zip

from django.db.backends.util import truncate_name, typecast_timestamp
from django.db.models.sql import compiler
from django.db.models.sql.constants import MULTI
from django.utils import six

SQLCompiler = compiler.SQLCompiler


class GeoSQLCompiler(compiler.SQLCompiler):

    def get_columns(self, with_aliases=False):
        """
        Return the list of columns to use in the select statement. If no
        columns have been specified, returns all columns relating to fields in
        the model.

        If 'with_aliases' is true, any column names that are duplicated
        (without the table names) are given unique aliases. This is needed in
        some cases to avoid ambiguitity with nested queries.

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        result = ['(%s) AS %s' % (self.get_extra_select_format(alias) % col[0], qn2(alias))
                  for alias, col in six.iteritems(self.query.extra_select)]
        aliases = set(self.query.extra_select.keys())
        if with_aliases:
            col_aliases = aliases.copy()
        else:
            col_aliases = set()
        if self.query.select:
            only_load = self.deferred_to_columns()
            # This loop customized for GeoQuery.
            for col, field in zip(self.query.select, self.query.select_fields):
                if isinstance(col, (list, tuple)):
                    alias, column = col
                    table = self.query.alias_map[alias].table_name
                    if table in only_load and column not in only_load[table]:
                        continue
                    r = self.get_field_select(field, alias, column)
                    if with_aliases:
                        if col[1] in col_aliases:
                            c_alias = 'Col%d' % len(col_aliases)
                            result.append('%s AS %s' % (r, c_alias))
                            aliases.add(c_alias)
                            col_aliases.add(c_alias)
                        else:
                            result.append('%s AS %s' % (r, qn2(col[1])))
                            aliases.add(r)
                            col_aliases.add(col[1])
                    else:
                        result.append(r)
                        aliases.add(r)
                        col_aliases.add(col[1])
                else:
                    result.append(col.as_sql(qn, self.connection))
                    if hasattr(col, 'alias'):
                        aliases.add(col.alias)
                        col_aliases.add(col.alias)
        elif self.query.default_cols:
            cols, new_aliases = self.get_default_columns(with_aliases,
                    col_aliases)
            result.extend(cols)
            aliases.update(new_aliases)

        max_name_length = self.connection.ops.max_name_length()
        result.extend([
            '%s%s' % (
                self.get_extra_select_format(alias) % aggregate.as_sql(qn, self.connection),
                alias is not None
                and ' AS %s' % qn(truncate_name(alias, max_name_length))
                or ''
            )
            for alias, aggregate in self.query.aggregate_select.items()
        ])

        # This loop customized for GeoQuery.
        for (table, col), field in zip(self.query.related_select_cols, self.query.related_select_fields):
            r = self.get_field_select(field, table, col)
            if with_aliases and col in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (r, c_alias))
                aliases.add(c_alias)
                col_aliases.add(c_alias)
            else:
                result.append(r)
                aliases.add(r)
                col_aliases.add(col)

        self._select_aliases = aliases
        return result

    def get_default_columns(self, with_aliases=False, col_aliases=None,
            start_alias=None, opts=None, as_pairs=False, local_only=False):
        """
        Computes the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.

        Returns a list of strings, quoted appropriately for use in SQL
        directly, as well as a set of aliases used in the select statement (if
        'as_pairs' is True, returns a list of (alias, col_name) pairs instead
        of strings as the first component and None as the second component).

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        result = []
        if opts is None:
            opts = self.query.model._meta
        aliases = set()
        only_load = self.deferred_to_columns()
        if start_alias:
            seen = {None: start_alias}
        for field, model in opts.get_fields_with_model():
            # For local fields (even if through proxy) the model should
            # be None.
            if model == opts.concrete_model:
                model = None
            if local_only and model is not None:
                continue
            if start_alias:
                try:
                    alias = seen[model]
                except KeyError:
                    link_field = opts.get_ancestor_link(model)
                    alias = self.query.join((start_alias, model._meta.db_table,
                            link_field.column, model._meta.pk.column))
                    seen[model] = alias
            else:
                # If we're starting from the base model of the queryset, the
                # aliases will have already been set up in pre_sql_setup(), so
                # we can save time here.
                alias = self.query.included_inherited_models[model]
            table = self.query.alias_map[alias].table_name
            if table in only_load and field.column not in only_load[table]:
                continue
            if as_pairs:
                result.append((alias, field.column))
                aliases.add(alias)
                continue
            # This part of the function is customized for GeoQuery. We
            # see if there was any custom selection specified in the
            # dictionary, and set up the selection format appropriately.
            field_sel = self.get_field_select(field, alias)
            if with_aliases and field.column in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (field_sel, c_alias))
                col_aliases.add(c_alias)
                aliases.add(c_alias)
            else:
                r = field_sel
                result.append(r)
                aliases.add(r)
                if with_aliases:
                    col_aliases.add(field.column)
        return result, aliases

    def resolve_columns(self, row, fields=()):
        """
        This routine is necessary so that distances and geometries returned
        from extra selection SQL get resolved appropriately into Python
        objects.
        """
        values = []
        aliases = list(self.query.extra_select)

        # Have to set a starting row number offset that is used for
        # determining the correct starting row index -- needed for
        # doing pagination with Oracle.
        rn_offset = 0
        if self.connection.ops.oracle:
            if self.query.high_mark is not None or self.query.low_mark:
                rn_offset = 1
        index_start = rn_offset + len(aliases)

        # Converting any extra selection values (e.g., geometries and
        # distance objects added by GeoQuerySet methods).
        values = [self.query.convert_values(v,
                            self.query.extra_select_fields.get(a, None),
                            self.connection)
                  for v, a in zip(row[rn_offset:index_start], aliases)]
        if self.connection.ops.oracle or getattr(self.query, 'geo_values', False):
            # We resolve the rest of the columns if we're on Oracle or if
            # the `geo_values` attribute is defined.
            for value, field in zip_longest(row[index_start:], fields):
                values.append(self.query.convert_values(value, field, self.connection))
        else:
            values.extend(row[index_start:])
        return tuple(values)

    #### Routines unique to GeoQuery ####
    def get_extra_select_format(self, alias):
        sel_fmt = '%s'
        if hasattr(self.query, 'custom_select') and alias in self.query.custom_select:
            sel_fmt = sel_fmt % self.query.custom_select[alias]
        return sel_fmt

    def get_field_select(self, field, alias=None, column=None):
        """
        Returns the SELECT SQL string for the given field.  Figures out
        if any custom selection SQL is needed for the column  The `alias`
        keyword may be used to manually specify the database table where
        the column exists, if not in the model associated with this
        `GeoQuery`.  Similarly, `column` may be used to specify the exact
        column name, rather than using the `column` attribute on `field`.
        """
        sel_fmt = self.get_select_format(field)
        if field in self.query.custom_select:
            field_sel = sel_fmt % self.query.custom_select[field]
        else:
            field_sel = sel_fmt % self._field_column(field, alias, column)
        return field_sel

    def get_select_format(self, fld):
        """
        Returns the selection format string, depending on the requirements
        of the spatial backend.  For example, Oracle and MySQL require custom
        selection formats in order to retrieve geometries in OGC WKT. For all
        other fields a simple '%s' format string is returned.
        """
        if self.connection.ops.select and hasattr(fld, 'geom_type'):
            # This allows operations to be done on fields in the SELECT,
            # overriding their values -- used by the Oracle and MySQL
            # spatial backends to get database values as WKT, and by the
            # `transform` method.
            sel_fmt = self.connection.ops.select

            # Because WKT doesn't contain spatial reference information,
            # the SRID is prefixed to the returned WKT to ensure that the
            # transformed geometries have an SRID different than that of the
            # field -- this is only used by `transform` for Oracle and
            # SpatiaLite backends.
            if self.query.transformed_srid and ( self.connection.ops.oracle or
                                                 self.connection.ops.spatialite ):
                sel_fmt = "'SRID=%d;'||%s" % (self.query.transformed_srid, sel_fmt)
        else:
            sel_fmt = '%s'
        return sel_fmt

    # Private API utilities, subject to change.
    def _field_column(self, field, table_alias=None, column=None):
        """
        Helper function that returns the database column for the given field.
        The table and column are returned (quoted) in the proper format, e.g.,
        `"geoapp_city"."point"`.  If `table_alias` is not specified, the
        database table associated with the model of this `GeoQuery` will be
        used.  If `column` is specified, it will be used instead of the value
        in `field.column`.
        """
        if table_alias is None:
            table_alias = self.query.model._meta.db_table
        return "%s.%s" % (self.quote_name_unless_alias(table_alias),
                          self.connection.ops.quote_name(column or field.column))


class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler):
    pass


class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler):
    pass


class SQLUpdateCompiler(compiler.SQLUpdateCompiler, GeoSQLCompiler):
    pass


class SQLAggregateCompiler(compiler.SQLAggregateCompiler, GeoSQLCompiler):
    pass


class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler):
    """
    This is overridden for GeoDjango to properly cast date columns, since
    `GeoQuery.resolve_columns` is used for spatial values.
    See #14648, #16757.
    """
    def results_iter(self):
        if self.connection.ops.oracle:
            from django.db.models.fields import DateTimeField
            fields = [DateTimeField()]
        else:
            needs_string_cast = self.connection.features.needs_datetime_string_cast

        offset = len(self.query.extra_select)
        for rows in self.execute_sql(MULTI):
            for row in rows:
                date = row[offset]
                if self.connection.ops.oracle:
                    date = self.resolve_columns(row, fields)[offset]
                elif needs_string_cast:
                    date = typecast_timestamp(str(date))
                yield date
mit
ar7z1/ansible
lib/ansible/modules/messaging/rabbitmq_policy.py
16
4535
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2013, John Dewey <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: rabbitmq_policy
short_description: Manage the state of policies in RabbitMQ.
description:
  - Manage the state of a policy in RabbitMQ.
version_added: "1.5"
author: "John Dewey (@retr0h)"
options:
  name:
    description:
      - The name of the policy to manage.
    required: true
  vhost:
    description:
      - The name of the vhost to apply to.
    default: /
  apply_to:
    description:
      - What the policy applies to. Requires RabbitMQ 3.2.0 or later.
    default: all
    choices: [all, exchanges, queues]
    version_added: "2.1"
  pattern:
    description:
      - A regex of queues to apply the policy to.
    required: true
  tags:
    description:
      - A dict or string describing the policy.
    required: true
  priority:
    description:
      - The priority of the policy.
    default: 0
  node:
    description:
      - Erlang node name of the rabbit we wish to configure.
    default: rabbit
  state:
    description:
      - The state of the policy.
    default: present
    choices: [present, absent]
'''

EXAMPLES = '''
- name: ensure the default vhost contains the HA policy via a dict
  rabbitmq_policy:
    name: HA
    pattern: .*
  args:
    tags:
      ha-mode: all

- name: ensure the default vhost contains the HA policy
  rabbitmq_policy:
    name: HA
    pattern: .*
    tags:
      ha-mode: all
'''

import json
from ansible.module_utils.basic import AnsibleModule


class RabbitMqPolicy(object):

    def __init__(self, module, name):
        self._module = module
        self._name = name
        self._vhost = module.params['vhost']
        self._pattern = module.params['pattern']
        self._apply_to = module.params['apply_to']
        self._tags = module.params['tags']
        self._priority = module.params['priority']
        self._node = module.params['node']
        self._rabbitmqctl = module.get_bin_path('rabbitmqctl', True)

    def _exec(self, args, run_in_check_mode=False):
        if not self._module.check_mode or (self._module.check_mode and run_in_check_mode):
            cmd = [self._rabbitmqctl, '-q', '-n', self._node]
            args.insert(1, '-p')
            args.insert(2, self._vhost)
            rc, out, err = self._module.run_command(cmd + args, check_rc=True)
            return out.splitlines()
        return list()

    def list(self):
        policies = self._exec(['list_policies'], True)

        for policy in policies:
            if not policy:
                continue
            policy_name = policy.split('\t')[1]
            if policy_name == self._name:
                return True
        return False

    def set(self):
        args = ['set_policy']
        args.append(self._name)
        args.append(self._pattern)
        args.append(json.dumps(self._tags))
        args.append('--priority')
        args.append(self._priority)
        if self._apply_to != 'all':
            args.append('--apply-to')
            args.append(self._apply_to)
        return self._exec(args)

    def clear(self):
        return self._exec(['clear_policy', self._name])


def main():
    arg_spec = dict(
        name=dict(required=True),
        vhost=dict(default='/'),
        pattern=dict(required=True),
        apply_to=dict(default='all', choices=['all', 'exchanges', 'queues']),
        tags=dict(type='dict', required=True),
        priority=dict(default='0'),
        node=dict(default='rabbit'),
        state=dict(default='present', choices=['present', 'absent']),
    )

    module = AnsibleModule(
        argument_spec=arg_spec,
        supports_check_mode=True
    )

    name = module.params['name']
    state = module.params['state']
    rabbitmq_policy = RabbitMqPolicy(module, name)

    result = dict(changed=False, name=name, state=state)

    if rabbitmq_policy.list():
        if state == 'absent':
            rabbitmq_policy.clear()
            result['changed'] = True
        else:
            result['changed'] = False
    elif state == 'present':
        rabbitmq_policy.set()
        result['changed'] = True

    module.exit_json(**result)


if __name__ == '__main__':
    main()
gpl-3.0
knoguchi/kenix-scm
server/lib/boto/swf/layer1_decisions.py
18
11934
""" Helper class for creating decision responses. """ class Layer1Decisions(object): """ Use this object to build a list of decisions for a decision response. Each method call will add append a new decision. Retrieve the list of decisions from the _data attribute. """ def __init__(self): self._data = [] def schedule_activity_task(self, activity_id, activity_type_name, activity_type_version, task_list=None, control=None, heartbeat_timeout=None, schedule_to_close_timeout=None, schedule_to_start_timeout=None, start_to_close_timeout=None, input=None): """ Schedules an activity task. :type activity_id: string :param activity_id: The activityId of the type of the activity being scheduled. :type activity_type_name: string :param activity_type_name: The name of the type of the activity being scheduled. :type activity_type_version: string :param activity_type_version: The version of the type of the activity being scheduled. :type task_list: string :param task_list: If set, specifies the name of the task list in which to schedule the activity task. If not specified, the defaultTaskList registered with the activity type will be used. Note: a task list for this activity task must be specified either as a default for the activity type or through this field. If neither this field is set nor a default task list was specified at registration time then a fault will be returned. """ o = {} o['decisionType'] = 'ScheduleActivityTask' attrs = o['scheduleActivityTaskDecisionAttributes'] = {} attrs['activityId'] = activity_id attrs['activityType'] = { 'name': activity_type_name, 'version': activity_type_version, } if task_list is not None: attrs['taskList'] = {'name': task_list} if control is not None: attrs['control'] = control if heartbeat_timeout is not None: attrs['heartbeatTimeout'] = heartbeat_timeout if schedule_to_close_timeout is not None: attrs['scheduleToCloseTimeout'] = schedule_to_close_timeout if schedule_to_start_timeout is not None: attrs['scheduleToStartTimeout'] = schedule_to_start_timeout if start_to_close_timeout is not None: attrs['startToCloseTimeout'] = start_to_close_timeout if input is not None: attrs['input'] = input self._data.append(o) def request_cancel_activity_task(self, activity_id): """ Attempts to cancel a previously scheduled activity task. If the activity task was scheduled but has not been assigned to a worker, then it will be canceled. If the activity task was already assigned to a worker, then the worker will be informed that cancellation has been requested in the response to RecordActivityTaskHeartbeat. """ o = {} o['decisionType'] = 'RequestCancelActivityTask' attrs = o['requestCancelActivityTaskDecisionAttributes'] = {} attrs['activityId'] = activity_id self._data.append(o) def record_marker(self, marker_name, details=None): """ Records a MarkerRecorded event in the history. Markers can be used for adding custom information in the history for instance to let deciders know that they do not need to look at the history beyond the marker event. 
""" o = {} o['decisionType'] = 'RecordMarker' attrs = o['recordMarkerDecisionAttributes'] = {} attrs['markerName'] = marker_name if details is not None: attrs['details'] = details self._data.append(o) def complete_workflow_execution(self, result=None): """ Closes the workflow execution and records a WorkflowExecutionCompleted event in the history """ o = {} o['decisionType'] = 'CompleteWorkflowExecution' attrs = o['completeWorkflowExecutionDecisionAttributes'] = {} if result is not None: attrs['result'] = result self._data.append(o) def fail_workflow_execution(self, reason=None, details=None): """ Closes the workflow execution and records a WorkflowExecutionFailed event in the history. """ o = {} o['decisionType'] = 'FailWorkflowExecution' attrs = o['failWorkflowExecutionDecisionAttributes'] = {} if reason is not None: attrs['reason'] = reason if details is not None: attrs['details'] = details self._data.append(o) def cancel_workflow_executions(self, details=None): """ Closes the workflow execution and records a WorkflowExecutionCanceled event in the history. """ o = {} o['decisionType'] = 'CancelWorkflowExecution' attrs = o['cancelWorkflowExecutionsDecisionAttributes'] = {} if details is not None: attrs['details'] = details self._data.append(o) def continue_as_new_workflow_execution(self, child_policy=None, execution_start_to_close_timeout=None, input=None, tag_list=None, task_list=None, start_to_close_timeout=None, workflow_type_version=None): """ Closes the workflow execution and starts a new workflow execution of the same type using the same workflow id and a unique run Id. A WorkflowExecutionContinuedAsNew event is recorded in the history. """ o = {} o['decisionType'] = 'ContinueAsNewWorkflowExecution' attrs = o['continueAsNewWorkflowExecutionDecisionAttributes'] = {} if child_policy is not None: attrs['childPolicy'] = child_policy if execution_start_to_close_timeout is not None: attrs['executionStartToCloseTimeout'] = execution_start_to_close_timeout if input is not None: attrs['input'] = input if tag_list is not None: attrs['tagList'] = tag_list if task_list is not None: attrs['taskList'] = {'name': task_list} if start_to_close_timeout is not None: attrs['startToCloseTimeout'] = start_to_close_timeout if workflow_type_version is not None: attrs['workflowTypeVersion'] = workflow_type_version self._data.append(o) def start_timer(self, start_to_fire_timeout, timer_id, control=None): """ Starts a timer for this workflow execution and records a TimerStarted event in the history. This timer will fire after the specified delay and record a TimerFired event. """ o = {} o['decisionType'] = 'StartTimer' attrs = o['startTimerDecisionAttributes'] = {} attrs['startToFireTimeout'] = start_to_fire_timeout attrs['timerId'] = timer_id if control is not None: attrs['control'] = control self._data.append(o) def cancel_timer(self, timer_id): """ Cancels a previously started timer and records a TimerCanceled event in the history. """ o = {} o['decisionType'] = 'CancelTimer' attrs = o['cancelTimerDecisionAttributes'] = {} attrs['timerId'] = timer_id self._data.append(o) def signal_external_workflow_execution(self, workflow_id, signal_name, run_id=None, control=None, input=None): """ Requests a signal to be delivered to the specified external workflow execution and records a SignalExternalWorkflowExecutionInitiated event in the history. 
""" o = {} o['decisionType'] = 'SignalExternalWorkflowExecution' attrs = o['signalExternalWorkflowExecutionDecisionAttributes'] = {} attrs['workflowId'] = workflow_id attrs['signalName'] = signal_name if run_id is not None: attrs['runId'] = run_id if control is not None: attrs['control'] = control if input is not None: attrs['input'] = input self._data.append(o) def request_cancel_external_workflow_execution(self, workflow_id, control=None, run_id=None): """ Requests that a request be made to cancel the specified external workflow execution and records a RequestCancelExternalWorkflowExecutionInitiated event in the history. """ o = {} o['decisionType'] = 'RequestCancelExternalWorkflowExecution' attrs = o['requestCancelExternalWorkflowExecutionDecisionAttributes'] = {} attrs['workflowId'] = workflow_id if control is not None: attrs['control'] = control if run_id is not None: attrs['runId'] = run_id self._data.append(o) def start_child_workflow_execution(self, workflow_type_name, workflow_type_version, workflow_id, child_policy=None, control=None, execution_start_to_close_timeout=None, input=None, tag_list=None, task_list=None, task_start_to_close_timeout=None): """ Requests that a child workflow execution be started and records a StartChildWorkflowExecutionInitiated event in the history. The child workflow execution is a separate workflow execution with its own history. """ o = {} o['decisionType'] = 'StartChildWorkflowExecution' attrs = o['startChildWorkflowExecutionDecisionAttributes'] = {} attrs['workflowType'] = { 'name': workflow_type_name, 'version': workflow_type_version, } attrs['workflowId'] = workflow_id if child_policy is not None: attrs['childPolicy'] = child_policy if control is not None: attrs['control'] = control if execution_start_to_close_timeout is not None: attrs['executionStartToCloseTimeout'] = execution_start_to_close_timeout if input is not None: attrs['input'] = input if tag_list is not None: attrs['tagList'] = tag_list if task_list is not None: attrs['taskList'] = {'name': task_list} if task_start_to_close_timeout is not None: attrs['taskStartToCloseTimeout'] = task_start_to_close_timeout self._data.append(o)
apache-2.0
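A short usage sketch for the class above, assuming boto's SWF Layer1 client; the domain, task list, and inputs are placeholder assumptions. The accumulated _data list is what gets handed back in respond_decision_task_completed.

# Hedged sketch: build a decision list and return it via the SWF
# Layer1 client; identifiers below are illustrative, not real resources.
import boto.swf.layer1
from boto.swf.layer1_decisions import Layer1Decisions

swf = boto.swf.layer1.Layer1()
task = swf.poll_for_decision_task('my-domain', 'my-task-list')

d = Layer1Decisions()
d.schedule_activity_task('activity-1', 'MyActivity', '1.0',
                         task_list='my-task-list', input='payload')
d.start_timer('60', 'timer-1')  # SWF timeouts are strings of seconds

swf.respond_decision_task_completed(task['taskToken'], decisions=d._data)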
JingJunYin/tensorflow
tensorflow/tools/api/generator/create_python_api_test.py
32
2857
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for create_python_api."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import imp
import sys

from tensorflow.python.platform import test
from tensorflow.python.util.tf_export import tf_export
from tensorflow.tools.api.generator import create_python_api


@tf_export('test_op', 'test_op1')
def test_op():
  pass


@tf_export('TestClass', 'NewTestClass')
class TestClass(object):
  pass


_TEST_CONSTANT = 5
_MODULE_NAME = 'test.tensorflow.test_module'


class CreatePythonApiTest(test.TestCase):

  def setUp(self):
    # Add fake op to a module that has 'tensorflow' in the name.
    sys.modules[_MODULE_NAME] = imp.new_module(_MODULE_NAME)
    setattr(sys.modules[_MODULE_NAME], 'test_op', test_op)
    setattr(sys.modules[_MODULE_NAME], 'TestClass', TestClass)
    test_op.__module__ = _MODULE_NAME
    TestClass.__module__ = _MODULE_NAME
    tf_export('consts._TEST_CONSTANT').export_constant(
        _MODULE_NAME, '_TEST_CONSTANT')

  def tearDown(self):
    del sys.modules[_MODULE_NAME]

  def testFunctionImportIsAdded(self):
    imports = create_python_api.get_api_imports()
    expected_import = (
        'from test.tensorflow.test_module import test_op as test_op1')
    self.assertTrue(
        expected_import in str(imports),
        msg='%s not in %s' % (expected_import, str(imports)))

    expected_import = 'from test.tensorflow.test_module import test_op'
    self.assertTrue(
        expected_import in str(imports),
        msg='%s not in %s' % (expected_import, str(imports)))

  def testClassImportIsAdded(self):
    imports = create_python_api.get_api_imports()
    expected_import = 'from test.tensorflow.test_module import TestClass'
    self.assertTrue(
        'TestClass' in str(imports),
        msg='%s not in %s' % (expected_import, str(imports)))

  def testConstantIsAdded(self):
    imports = create_python_api.get_api_imports()
    expected = 'from test.tensorflow.test_module import _TEST_CONSTANT'
    self.assertTrue(expected in str(imports),
                    msg='%s not in %s' % (expected, str(imports)))


if __name__ == '__main__':
  test.main()
apache-2.0
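For orientation, the aliased import lines the generator is expected to emit for the symbols decorated above. The first two are asserted verbatim by the tests; the remaining aliases are inferences from the same tf_export pattern rather than quoted assertions.

# Illustrative only: the shape of the generated API imports.
expected = [
    'from test.tensorflow.test_module import test_op',
    'from test.tensorflow.test_module import test_op as test_op1',
    'from test.tensorflow.test_module import TestClass',
    # inferred from the tf_export('TestClass', 'NewTestClass') decoration:
    'from test.tensorflow.test_module import TestClass as NewTestClass',
]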
mhogg/scipy
scipy/sparse/linalg/isolve/setup.py
108
1408
#!/usr/bin/env python
from __future__ import division, print_function, absolute_import

from os.path import join


def configuration(parent_package='', top_path=None):
    from numpy.distutils.system_info import get_info, NotFoundError
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils import get_g77_abi_wrappers

    config = Configuration('isolve', parent_package, top_path)

    lapack_opt = get_info('lapack_opt')

    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')

    # iterative methods
    methods = ['BiCGREVCOM.f.src',
               'BiCGSTABREVCOM.f.src',
               'CGREVCOM.f.src',
               'CGSREVCOM.f.src',
#               'ChebyREVCOM.f.src',
               'GMRESREVCOM.f.src',
#               'JacobiREVCOM.f.src',
               'QMRREVCOM.f.src',
#               'SORREVCOM.f.src'
               ]

    Util = ['STOPTEST2.f.src', 'getbreak.f.src']
    sources = Util + methods + ['_iterative.pyf.src']
    sources = [join('iterative', x) for x in sources]
    sources += get_g77_abi_wrappers(lapack_opt)

    config.add_extension('_iterative',
                         sources=sources,
                         extra_info=lapack_opt)

    config.add_data_dir('tests')

    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
bsd-3-clause
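The _iterative extension compiled here is what scipy.sparse.linalg's iterative solvers call into; a minimal usage sketch of one of them:

# Minimal sketch: cg() ultimately drives the CGREVCOM reverse-communication
# routine built by the setup script above.
import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.linalg import cg

A = csr_matrix(np.array([[4.0, 1.0], [1.0, 3.0]]))
b = np.array([1.0, 2.0])
x, info = cg(A, b)  # info == 0 signals convergence
print(x)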
Basis/pip
pip/_vendor/html5lib/filters/optionaltags.py
1727
10500
from __future__ import absolute_import, division, unicode_literals

from . import _base


class Filter(_base.Filter):
    def slider(self):
        previous1 = previous2 = None
        for token in self.source:
            if previous1 is not None:
                yield previous2, previous1, token
            previous2 = previous1
            previous1 = token
        yield previous2, previous1, None

    def __iter__(self):
        for previous, token, next in self.slider():
            type = token["type"]
            if type == "StartTag":
                if (token["data"] or
                        not self.is_optional_start(token["name"], previous, next)):
                    yield token
            elif type == "EndTag":
                if not self.is_optional_end(token["name"], next):
                    yield token
            else:
                yield token

    def is_optional_start(self, tagname, previous, next):
        type = next and next["type"] or None
        if tagname == 'html':
            # An html element's start tag may be omitted if the first thing
            # inside the html element is not a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname == 'head':
            # A head element's start tag may be omitted if the first thing
            # inside the head element is an element.
            # XXX: we also omit the start tag if the head element is empty
            if type in ("StartTag", "EmptyTag"):
                return True
            elif type == "EndTag":
                return next["name"] == "head"
        elif tagname == 'body':
            # A body element's start tag may be omitted if the first thing
            # inside the body element is not a space character or a comment,
            # except if the first thing inside the body element is a script
            # or style element and the node immediately preceding the body
            # element is a head element whose end tag has been omitted.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we do not look at the preceding event, so we never omit
                # the body element's start tag if it's followed by a script or
                # a style element.
                return next["name"] not in ('script', 'style')
            else:
                return True
        elif tagname == 'colgroup':
            # A colgroup element's start tag may be omitted if the first thing
            # inside the colgroup element is a col element, and if the element
            # is not immediately preceded by another colgroup element whose
            # end tag has been omitted.
            if type in ("StartTag", "EmptyTag"):
                # XXX: we do not look at the preceding event, so instead we never
                # omit the colgroup element's end tag when it is immediately
                # followed by another colgroup element. See is_optional_end.
                return next["name"] == "col"
            else:
                return False
        elif tagname == 'tbody':
            # A tbody element's start tag may be omitted if the first thing
            # inside the tbody element is a tr element, and if the element is
            # not immediately preceded by a tbody, thead, or tfoot element
            # whose end tag has been omitted.
            if type == "StartTag":
                # omit the thead and tfoot elements' end tag when they are
                # immediately followed by a tbody element. See is_optional_end.
                if previous and previous['type'] == 'EndTag' and \
                        previous['name'] in ('tbody', 'thead', 'tfoot'):
                    return False
                return next["name"] == 'tr'
            else:
                return False
        return False

    def is_optional_end(self, tagname, next):
        type = next and next["type"] or None
        if tagname in ('html', 'head', 'body'):
            # An html element's end tag may be omitted if the html element
            # is not immediately followed by a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname in ('li', 'optgroup', 'tr'):
            # A li element's end tag may be omitted if the li element is
            # immediately followed by another li element or if there is
            # no more content in the parent element.
            # An optgroup element's end tag may be omitted if the optgroup
            # element is immediately followed by another optgroup element,
            # or if there is no more content in the parent element.
            # A tr element's end tag may be omitted if the tr element is
            # immediately followed by another tr element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] == tagname
            else:
                return type == "EndTag" or type is None
        elif tagname in ('dt', 'dd'):
            # A dt element's end tag may be omitted if the dt element is
            # immediately followed by another dt element or a dd element.
            # A dd element's end tag may be omitted if the dd element is
            # immediately followed by another dd element or a dt element,
            # or if there is no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('dt', 'dd')
            elif tagname == 'dd':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'p':
            # A p element's end tag may be omitted if the p element is
            # immediately followed by an address, article, aside,
            # blockquote, datagrid, dialog, dir, div, dl, fieldset,
            # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
            # nav, ol, p, pre, section, table, or ul element, or if
            # there is no more content in the parent element.
            if type in ("StartTag", "EmptyTag"):
                return next["name"] in ('address', 'article', 'aside',
                                        'blockquote', 'datagrid', 'dialog',
                                        'dir', 'div', 'dl', 'fieldset', 'footer',
                                        'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
                                        'header', 'hr', 'menu', 'nav', 'ol',
                                        'p', 'pre', 'section', 'table', 'ul')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'option':
            # An option element's end tag may be omitted if the option
            # element is immediately followed by another option element,
            # or if it is immediately followed by an <code>optgroup</code>
            # element, or if there is no more content in the parent
            # element.
            if type == "StartTag":
                return next["name"] in ('option', 'optgroup')
            else:
                return type == "EndTag" or type is None
        elif tagname in ('rt', 'rp'):
            # An rt element's end tag may be omitted if the rt element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            # An rp element's end tag may be omitted if the rp element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('rt', 'rp')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'colgroup':
            # A colgroup element's end tag may be omitted if the colgroup
            # element is not immediately followed by a space character or
            # a comment.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we also look for an immediately following colgroup
                # element. See is_optional_start.
                return next["name"] != 'colgroup'
            else:
                return True
        elif tagname in ('thead', 'tbody'):
            # A thead element's end tag may be omitted if the thead element
            # is immediately followed by a tbody or tfoot element.
            # A tbody element's end tag may be omitted if the tbody element
            # is immediately followed by a tbody or tfoot element, or if
            # there is no more content in the parent element.
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] in ['tbody', 'tfoot']
            elif tagname == 'tbody':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'tfoot':
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] == 'tbody'
            else:
                return type == "EndTag" or type is None
        elif tagname in ('td', 'th'):
            # A td element's end tag may be omitted if the td element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            # A th element's end tag may be omitted if the th element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('td', 'th')
            else:
                return type == "EndTag" or type is None
        return False
mit
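In practice this filter is driven by html5lib's serializer when omit_optional_tags is enabled; a hedged end-to-end sketch, assuming the standard (non-vendored) html5lib entry points:

# Sketch: serializing with omit_optional_tags=True routes the token
# stream through the Filter defined above.
import html5lib
from html5lib.serializer import HTMLSerializer

doc = html5lib.parse('<html><head></head><body><p>one<p>two</body></html>')
walker = html5lib.getTreeWalker('etree')
out = HTMLSerializer(omit_optional_tags=True).render(walker(doc))
print(out)  # optional tags such as <html>, <head> and </p> are dropped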
uhlik/blendmaxwell
mxs.py
2
222633
#!/Library/Frameworks/Python.framework/Versions/3.5/bin/python3 # -*- coding: utf-8 -*- # The MIT License (MIT) # # Copyright (c) 2015 Jakub Uhlík # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is furnished # to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import os import platform import datetime import struct import math import sys import numpy from .log import log, LogStyles from . import utils s = platform.system() if(s == 'Darwin'): pass elif(s == 'Linux'): try: from pymaxwell import * except ImportError: mp = os.environ.get("MAXWELL3_ROOT") if(not mp): raise OSError("missing MAXWELL3_ROOT environment variable") pp = os.path.abspath(os.path.join(mp, 'python', 'pymaxwell', 'python3.5')) if(not os.path.exists(pp)): raise OSError("pymaxwell for python 3.5 does not exist ({})".format(pp)) sys.path.insert(0, pp) from pymaxwell import * elif(s == 'Windows'): try: from pymaxwell import * except ImportError: mp = os.environ.get("MAXWELL3_ROOT") if(not mp): raise OSError("missing MAXWELL3_ROOT environment variable") pp = os.path.abspath(os.path.join(mp, 'python', 'pymaxwell', 'python3.5')) if(not os.path.exists(pp)): raise OSError("pymaxwell for python 3.5 does not exist ({})".format(pp)) sys.path.insert(0, pp) os.environ['PATH'] = ';'.join([mp, os.environ['PATH']]) from pymaxwell import * def read_mxm_preview(path): import numpy s = Cmaxwell(mwcallback) m = s.readMaterial(path) a, _ = m.getPreview() r = numpy.copy(a) return r def material_preview_scene(scene, tmp_dir, quality, ): s = Cmaxwell(mwcallback) log('reading scene: {}'.format(scene), 2) ok = s.readMXS(scene) if(not ok): log('error reading scene: {}'.format(scene), 2, LogStyles.ERROR, ) return None def get_material_names(s): it = CmaxwellMaterialIterator() o = it.first(s) l = [] while not o.isNull(): name = o.getName() l.append(name) o = it.next() return l names = get_material_names(s) for n in names: if(n.lower() == 'preview'): break log('swapping material: {}'.format(n), 2) material = s.getMaterial(n) p = os.path.join(tmp_dir, 'material.mxm') material.read(p) material.forceToWriteIntoScene() log('setting parameters..', 2) s.setRenderParameter('ENGINE', bytes(quality, encoding='UTF-8')) exr = os.path.join(tmp_dir, "render.exr") s.setPath('RENDER', exr, 32) s.setRenderParameter('DO NOT SAVE MXI FILE', False) s.setRenderParameter('DO NOT SAVE IMAGE FILE', False) src_dir, _ = os.path.split(scene) ok = s.addSearchingPath(src_dir) sp = os.path.join(tmp_dir, "scene.mxs") log('writing scene: {}'.format(sp), 2) ok = s.writeMXS(sp) if(not ok): log('error writing scene: {}'.format(sp), 2, LogStyles.ERROR, ) return 
None log('done.', 2) return sp def material_preview_mxi(tmp_dir): mp = os.path.join(tmp_dir, 'render.mxi') ep = os.path.join(tmp_dir, 'render.exr') import numpy a = numpy.zeros((1, 1, 3), dtype=numpy.float, ) if(os.path.exists(mp)): log('reading mxi: {}'.format(mp), 2) i = CmaxwellMxi() i.read(mp) a, _ = i.getRenderBuffer(32) elif(os.path.exists(ep)): log('reading exr: {}'.format(ep), 2) i = CmaxwellMxi() i.readImage(ep) i.write(mp) a, _ = i.getRenderBuffer(32) else: log('image not found..', 2) return a def viewport_render_scene(tmp_dir, quality, ): s = Cmaxwell(mwcallback) p = os.path.join(tmp_dir, "scene.mxs") ok = s.readMXS(p) if(not ok): return False s.setRenderParameter('ENGINE', bytes(quality, encoding='UTF-8')) mxi = os.path.join(tmp_dir, "render.mxi") s.setRenderParameter('MXI FULLNAME', bytes(mxi, encoding='UTF-8')) exr = os.path.join(tmp_dir, "render.exr") s.setPath('RENDER', exr, 32) s.setRenderParameter('DO NOT SAVE MXI FILE', False) s.setRenderParameter('DO NOT SAVE IMAGE FILE', False) # turn off channels s.setRenderParameter('EMBED CHANNELS', 1) ls = ['DO ALPHA CHANNEL', 'DO IDOBJECT CHANNEL', 'DO IDMATERIAL CHANNEL', 'DO SHADOW PASS CHANNEL', 'DO MOTION CHANNEL', 'DO ROUGHNESS CHANNEL', 'DO FRESNEL CHANNEL', 'DO NORMALS CHANNEL', 'DO POSITION CHANNEL', 'DO ZBUFFER CHANNEL', 'DO DEEP CHANNEL', 'DO UV CHANNEL', 'DO ALPHA CUSTOM CHANNEL', 'DO REFLECTANCE CHANNEL', ] for n in ls: s.setRenderParameter(n, 0) ok = s.writeMXS(p) if(not ok): return False return True def viewport_render_mxi(tmp_dir): ep = os.path.join(tmp_dir, 'render2.exr') a = numpy.zeros((1, 1, 3), dtype=numpy.float, ) if(os.path.exists(ep)): log('reading exr: {}'.format(ep), 2) i = CmaxwellMxi() i.readImage(ep) # i.write(mp) a, _ = i.getRenderBuffer(32) else: log('image not found..', 2, LogStyles.ERROR) return a class MXSWriter(): def __init__(self, path, append=False, ): """Create scene or load existing. path string (path) append bool """ if(__name__ != "__main__"): if(platform.system() == 'Darwin'): raise ImportError("No pymaxwell directly in Blender on Mac OS X..") log(self.__class__.__name__, 1, LogStyles.MESSAGE, prefix="* ", ) self.path = path self.mxs = Cmaxwell(mwcallback) pid = utils.get_plugin_id() if(pid != ""): # write here directly, even though it is also part of scene data, but api change just for this is pointless.. self.mxs.setPluginID(pid) if(append): log("appending to existing scene..", 2, prefix="* ", ) self.mxs.readMXS(self.path) else: log("creating new scene..", 2, prefix="* ", ) self.mgr = CextensionManager.instance() self.mgr.loadAllExtensions() def write(self): """Write scene fo file. (no parameters..) """ log("saving scene..", 2) ok = self.mxs.writeMXS(self.path) log("done.", 2) return ok def erase_unused_materials(self): self.mxs.eraseUnusedMaterials() def set_base_and_pivot(self, o, matrix=None, motion=None, ): """Convert float tuples to Cbases and set to object. 
o CmaxwellObject base ((3 float), (3 float), (3 float), (3 float)) or None pivot ((3 float), (3 float), (3 float), (3 float)) or None """ if(matrix is None): matrix = ([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]) base = matrix[0] pivot = matrix[1] l = matrix[2] r = matrix[3] s = matrix[4] b = Cbase() b.origin = Cvector(*base[0]) b.xAxis = Cvector(*base[1]) b.yAxis = Cvector(*base[2]) b.zAxis = Cvector(*base[3]) p = Cbase() p.origin = Cvector(*pivot[0]) p.xAxis = Cvector(*pivot[1]) p.yAxis = Cvector(*pivot[2]) p.zAxis = Cvector(*pivot[3]) o.setBaseAndPivot(b, p) o.setPivotPosition(Cvector(*l)) o.setPivotRotation(Cvector(*r)) o.setPosition(Cvector(*l)) o.setRotation(Cvector(*r)) o.setScale(Cvector(*s)) if(motion is not None): for(t, _, b, p) in motion: bb = Cbase() bb.origin = Cvector(*b[0]) bb.xAxis = Cvector(*b[1]) bb.yAxis = Cvector(*b[2]) bb.zAxis = Cvector(*b[3]) pp = Cbase() pp.origin = Cvector(*p[0]) pp.xAxis = Cvector(*p[1]) pp.yAxis = Cvector(*p[2]) pp.zAxis = Cvector(*p[3]) o.setBaseAndPivot(bb, pp, t, ) def set_object_props(self, o, hide=False, opacity=100, cid=(1.0, 1.0, 1.0), hcam=False, hcamsc=False, hgi=False, hrr=False, hzcp=False, blocked_emitters=None, ): """Set common object properties. o CmaxwellObject hide bool opacity float cid (float, float, float) 0.0 - 1.0 rgb hcam bool hcamsc bool hgi bool hrr bool hzcp bool blocked_emitters list of blocked emitter object names """ if(hide): o.setHide(hide) if(opacity != 100.0): o.setOpacity(opacity) c = Crgb() c.assign(*cid) o.setColorID(c) if(hcam): o.setHideToCamera(True) if(hcamsc): o.setHideToCameraInShadowsPass(True) if(hgi): o.setHideToGI(True) if(hrr): o.setHideToReflectionsRefractions(True) if(hzcp): o.excludeOfCutPlanes(True) if(blocked_emitters): for n in blocked_emitters: ok = o.addExcludedLight(n) def texture_data_to_mxparams(self, name, data, mxparams, ): """Create CtextureMap, fill with parameters and put into mxparams. 
name string data dict {'type': string, 'path': string, 'channel': int, 'use_global_map': bool, 'tile_method_type': [bool, bool], 'tile_method_units': int, 'repeat': [float, float], 'mirror': [bool, bool], 'offset': [float, float], 'rotation': float, 'invert': bool, 'alpha_only': bool, 'interpolation': bool, 'brightness': float, 'contrast': float, 'saturation': float, 'hue': float, 'rgb_clamp': [float, float], } mxparams mxparams """ d = data if(d is None): return t = CtextureMap() t.setPath(d['path']) v = Cvector2D() v.assign(*d['repeat']) t.scale = v v = Cvector2D() v.assign(*d['offset']) t.offset = v t.rotation = d['rotation'] t.uvwChannelID = d['channel'] t.uIsTiled = d['tile_method_type'][0] t.vIsTiled = d['tile_method_type'][1] t.uIsMirrored = d['mirror'][0] t.vIsMirrored = d['mirror'][1] t.invert = d['invert'] # t.doGammaCorrection = 0 t.useAbsoluteUnits = d['tile_method_units'] t.normalMappingFlipRed = d['normal_mapping_flip_red'] t.normalMappingFlipGreen = d['normal_mapping_flip_green'] t.normalMappingFullRangeBlue = d['normal_mapping_full_range_blue'] t.useAlpha = d['alpha_only'] t.typeInterpolation = d['interpolation'] t.saturation = d['saturation'] / 100 t.contrast = d['contrast'] / 100 t.brightness = d['brightness'] / 100 t.hue = d['hue'] / 180 t.clampMin = d['rgb_clamp'][0] / 255 t.clampMax = d['rgb_clamp'][1] / 255 t.useGlobalMap = d['use_global_map'] # t.cosA = 1.000000 # t.sinA = 0.000000 ok = mxparams.setTextureMap(name, t) return mxparams def texture(self, d, ): """Create CtextureMap from parameters d dict """ if(d is None): return s = self.mxs t = CtextureMap() t.setPath(d['path']) t.uvwChannelID = d['channel'] t.brightness = d['brightness'] / 100 t.contrast = d['contrast'] / 100 t.saturation = d['saturation'] / 100 t.hue = d['hue'] / 180 t.useGlobalMap = d['use_global_map'] t.useAbsoluteUnits = d['tile_method_units'] t.uIsTiled = d['tile_method_type'][0] t.vIsTiled = d['tile_method_type'][1] t.uIsMirrored = d['mirror'][0] t.vIsMirrored = d['mirror'][1] vec = Cvector2D() vec.assign(d['offset'][0], d['offset'][1]) t.offset = vec t.rotation = d['rotation'] t.invert = d['invert'] t.useAlpha = d['alpha_only'] if(d['interpolation']): t.typeInterpolation = 1 else: t.typeInterpolation = 0 t.clampMin = d['rgb_clamp'][0] / 255 t.clampMax = d['rgb_clamp'][1] / 255 vec = Cvector2D() vec.assign(d['repeat'][0], d['repeat'][1]) t.scale = vec t.normalMappingFlipRed = d['normal_mapping_flip_red'] t.normalMappingFlipGreen = d['normal_mapping_flip_green'] t.normalMappingFullRangeBlue = d['normal_mapping_full_range_blue'] for i, pt in enumerate(d['procedural']): if(pt['use'] == 'BRICK'): e = self.mgr.createDefaultTextureExtension('Brick') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setFloat('Brick width', pt['brick_brick_width']) p.setFloat('Brick height', pt['brick_brick_height']) p.setInt('Brick offset', pt['brick_brick_offset']) p.setInt('Random offset', pt['brick_random_offset']) p.setByte('Double brick', pt['brick_double_brick']) p.setFloat('Small brick width', pt['brick_small_brick_width']) p.setByte('Round corners', pt['brick_round_corners']) p.setFloat('Boundary sharpness U', pt['brick_boundary_sharpness_u']) p.setFloat('Boundary sharpness V', pt['brick_boundary_sharpness_v']) p.setInt('Boundary noise detail', pt['brick_boundary_noise_detail']) p.setFloat('Boundary noise region U', pt['brick_boundary_noise_region_u']) p.setFloat('Boundary noise region V', pt['brick_boundary_noise_region_v']) p.setUInt('Seed', pt['brick_seed']) p.setByte('Random 
rotation', pt['brick_random_rotation']) p.setInt('Color variation', pt['brick_color_variation']) c = Crgb() c.assign(*pt['brick_brick_color_0']) p.setRgb('Brick color 0', c) self.texture_data_to_mxparams('Brick texture 0', pt['brick_brick_texture_0'], p, ) p.setInt('Sampling factor 0', pt['brick_sampling_factor_0']) p.setInt('Weight 0', pt['brick_weight_0']) c = Crgb() c.assign(*pt['brick_brick_color_1']) p.setRgb('Brick color 1', c) self.texture_data_to_mxparams('Brick texture 1', pt['brick_brick_texture_1'], p, ) p.setInt('Sampling factor 1', pt['brick_sampling_factor_1']) p.setInt('Weight 1', pt['brick_weight_1']) c = Crgb() c.assign(*pt['brick_brick_color_2']) p.setRgb('Brick color 2', c) self.texture_data_to_mxparams('Brick texture 2', pt['brick_brick_texture_2'], p, ) p.setInt('Sampling factor 2', pt['brick_sampling_factor_2']) p.setInt('Weight 2', pt['brick_weight_2']) p.setFloat('Mortar thickness', pt['brick_mortar_thickness']) c = Crgb() c.assign(*pt['brick_mortar_color']) p.setRgb('Mortar color', c) self.texture_data_to_mxparams('Mortar texture', pt['brick_mortar_texture'], p, ) t.addProceduralTexture(p) elif(pt['use'] == 'CHECKER'): e = self.mgr.createDefaultTextureExtension('Checker') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) c = Crgb() c.assign(*pt['checker_color_0']) p.setRgb('Color0', c) c = Crgb() c.assign(*pt['checker_color_1']) p.setRgb('Color1', c) p.setUInt('Number of elements U', pt['checker_number_of_elements_u']) p.setUInt('Number of elements V', pt['checker_number_of_elements_v']) p.setFloat('Transition sharpness', pt['checker_transition_sharpness']) p.setUInt('Fall-off', pt['checker_falloff']) t.addProceduralTexture(p) elif(pt['use'] == 'CIRCLE'): e = self.mgr.createDefaultTextureExtension('Circle') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) c = Crgb() c.assign(*pt['circle_background_color']) p.setRgb('Background color', c) c = Crgb() c.assign(*pt['circle_circle_color']) p.setRgb('Circle color', c) p.setFloat('RadiusU', pt['circle_radius_u']) p.setFloat('RadiusV', pt['circle_radius_v']) p.setFloat('Transition factor', pt['circle_transition_factor']) p.setUInt('Fall-off', pt['circle_falloff']) t.addProceduralTexture(p) elif(pt['use'] == 'GRADIENT3'): e = self.mgr.createDefaultTextureExtension('Gradient3') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setByte('Gradient U', pt['gradient3_gradient_u']) c = Crgb() c.assign(*pt['gradient3_color0_u']) p.setRgb('Color0 U', c) c = Crgb() c.assign(*pt['gradient3_color1_u']) p.setRgb('Color1 U', c) c = Crgb() c.assign(*pt['gradient3_color2_u']) p.setRgb('Color2 U', c) p.setUInt('Gradient type U', pt['gradient3_gradient_type_u']) p.setFloat('Color1 U position', pt['gradient3_color1_u_position']) p.setByte('Gradient V', pt['gradient3_gradient_v']) c = Crgb() c.assign(*pt['gradient3_color0_v']) p.setRgb('Color0 V', c) c = Crgb() c.assign(*pt['gradient3_color1_v']) p.setRgb('Color1 V', c) c = Crgb() c.assign(*pt['gradient3_color2_v']) p.setRgb('Color2 V', c) p.setUInt('Gradient type V', pt['gradient3_gradient_type_v']) p.setFloat('Color1 V position', pt['gradient3_color1_v_position']) t.addProceduralTexture(p) elif(pt['use'] == 'GRADIENT'): e = self.mgr.createDefaultTextureExtension('Gradient') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setByte('Gradient U', pt['gradient_gradient_u']) c = Crgb() c.assign(*pt['gradient_color0_u']) p.setRgb('Color0 U', c) c = Crgb() 
c.assign(*pt['gradient_color1_u']) p.setRgb('Color1 U', c) p.setUInt('Gradient type U', pt['gradient_gradient_type_u']) p.setFloat('Transition factor U', pt['gradient_transition_factor_u']) p.setByte('Gradient V', pt['gradient_gradient_v']) c = Crgb() c.assign(*pt['gradient_color0_v']) p.setRgb('Color0 V', c) c = Crgb() c.assign(*pt['gradient_color1_v']) p.setRgb('Color1 V', c) p.setUInt('Gradient type V', pt['gradient_gradient_type_v']) p.setFloat('Transition factor V', pt['gradient_transition_factor_v']) t.addProceduralTexture(p) elif(pt['use'] == 'GRID'): e = self.mgr.createDefaultTextureExtension('Grid') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) c = Crgb() c.assign(*pt['grid_boundary_color']) p.setRgb('Boundary color', c) c = Crgb() c.assign(*pt['grid_cell_color']) p.setRgb('Cell color', c) p.setFloat('Cell width', pt['grid_cell_width']) p.setFloat('Cell height', pt['grid_cell_height']) if(pt['grid_horizontal_lines']): p.setFloat('Boundary thickness U', pt['grid_boundary_thickness_u']) else: p.setFloat('Boundary thickness U', 0.0) if(pt['grid_vertical_lines']): p.setFloat('Boundary thickness V', pt['grid_boundary_thickness_v']) else: p.setFloat('Boundary thickness V', 0.0) p.setFloat('Transition sharpness', pt['grid_transition_sharpness']) p.setUInt('Fall-off', pt['grid_falloff']) t.addProceduralTexture(p) elif(pt['use'] == 'MARBLE'): e = self.mgr.createDefaultTextureExtension('Marble') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setUInt('Coordinates type', pt['marble_coordinates_type']) c = Crgb() c.assign(*pt['marble_color0']) p.setRgb('Color0', c) c = Crgb() c.assign(*pt['marble_color1']) p.setRgb('Color1', c) c = Crgb() c.assign(*pt['marble_color2']) p.setRgb('Color2', c) p.setFloat('Frequency', pt['marble_frequency']) p.setFloat('Detail', pt['marble_detail']) p.setInt('Octaves', pt['marble_octaves']) p.setUInt('Seed', pt['marble_seed']) t.addProceduralTexture(p) elif(pt['use'] == 'NOISE'): e = self.mgr.createDefaultTextureExtension('Noise') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setUInt('Coordinates type', pt['noise_coordinates_type']) c = Crgb() c.assign(*pt['noise_noise_color']) p.setRgb('Noise color', c) c = Crgb() c.assign(*pt['noise_background_color']) p.setRgb('Background color', c) p.setFloat('Detail', pt['noise_detail']) p.setFloat('Persistance', pt['noise_persistance']) p.setInt('Octaves', pt['noise_octaves']) p.setFloat('Low value', pt['noise_low_value']) p.setFloat('High value', pt['noise_high_value']) p.setUInt('Seed', pt['noise_seed']) t.addProceduralTexture(p) elif(pt['use'] == 'VORONOI'): e = self.mgr.createDefaultTextureExtension('Voronoi') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setUInt('Coordinates type', pt['voronoi_coordinates_type']) c = Crgb() c.assign(*pt['voronoi_color0']) p.setRgb('Color0', c) c = Crgb() c.assign(*pt['voronoi_color1']) p.setRgb('Color1', c) p.setInt('Detail', pt['voronoi_detail']) p.setUInt('Distance', pt['voronoi_distance']) p.setUInt('Combination', pt['voronoi_combination']) p.setFloat('Low value', pt['voronoi_low_value']) p.setFloat('High value', pt['voronoi_high_value']) p.setUInt('Seed', pt['voronoi_seed']) t.addProceduralTexture(p) elif(pt['use'] == 'TILED'): e = self.mgr.createDefaultTextureExtension('TiledTexture') p = e.getExtensionData() p.setFloat('Blend factor', pt['blending_factor']) c = Crgb() c.assign(*pt['tiled_base_color']) p.setRgb('Base Color', c) p.setByte('Use base 
color', pt['tiled_use_base_color']) p.setString('Filename_mask', pt['tiled_token_mask']) p.setString('Filename', pt['tiled_filename']) # 'Map U tile range' UCHAR # 'Map V tile range' UCHAR t.addProceduralTexture(p) elif(pt['use'] == 'WIREFRAME'): e = self.mgr.createDefaultTextureExtension('WireframeTexture') p = e.getExtensionData() c = Crgb() c.assign(*pt['wireframe_fill_color']) p.setRgb('Fill Color', c) c = Crgb() c.assign(*pt['wireframe_edge_color']) p.setRgb('Edge Color', c) c = Crgb() c.assign(*pt['wireframe_coplanar_edge_color']) p.setRgb('Coplanar Edge Color', c) p.setFloat('Edge Width', pt['wireframe_edge_width']) p.setFloat('Coplanar Edge Width', pt['wireframe_coplanar_edge_width']) p.setFloat('Coplanar Threshold', pt['wireframe_coplanar_threshold']) t.addProceduralTexture(p) else: raise TypeError("{0} is unknown procedural texture type".format(pt['use'])) return t def material_placeholder(self, n=None, ): if(n is not None): pass else: n = 'MATERIAL_PLACEHOLDER' s = self.mxs m = s.createMaterial(n) l = m.addLayer() b = l.addBSDF() r = b.getReflectance() a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = CtextureMap() mgr = CextensionManager.instance() mgr.loadAllExtensions() e = mgr.createDefaultTextureExtension('Checker') ch = e.getExtensionData() ch.setUInt('Number of elements U', 32) ch.setUInt('Number of elements V', 32) t.addProceduralTexture(ch) a.textureMap = t r.setAttribute('color', a) return m def material_default(self, n, ): s = self.mxs m = s.createMaterial(n) l = m.addLayer() b = l.addBSDF() return m def material_external(self, d, ): s = self.mxs p = d['path'] t = s.readMaterial(p) t.setName(d['name']) m = s.addMaterial(t) if(not d['embed']): m.setReference(1, p) return m def material_custom(self, d, ): s = self.mxs m = s.createMaterial(d['name']) d = d['data'] def global_props(d, m): # global properties if(d['override_map']): t = self.texture(d['override_map']) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['bump_map']) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) if(d['active_display_map']): t = self.texture(d['active_display_map']) m.setActiveDisplayMap(t) def displacement(d, m): if(not d['enabled']): return m.enableDisplacement(True) if(d['map'] is not None): t = self.texture(d['map']) m.setDisplacementMap(t) m.setDisplacementCommonParameters(d['type'], d['subdivision'], int(d['smoothing']), d['offset'], d['subdivision_method'], d['uv_interpolation'], ) m.setHeightMapDisplacementParameters(d['height'], d['height_units'], d['adaptive'], ) v = Cvector(*d['v3d_scale']) m.setVectorDisplacementParameters(v, d['v3d_transform'], d['v3d_rgb_mapping'], d['v3d_preset'], ) def add_bsdf(d, l): b = l.addBSDF() b.setName(d['name']) bp = d['bsdf_props'] # weight if(bp['weight_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['weight_map']) if(t is not None): a.textureMap = t a.value = bp['weight'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['weight'] b.setWeight(a) # enabled if(not bp['visible']): b.setState(False) # ior r = b.getReflectance() if(bp['ior'] == 1): # measured data r.setActiveIorMode(1) 
r.setComplexIor(bp['complex_ior']) else: if(bp['reflectance_0_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['reflectance_0_map']) if(t is not None): a.textureMap = t a.rgb.assign(*bp['reflectance_0']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['reflectance_0']) r.setAttribute('color', a) if(bp['reflectance_90_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['reflectance_90_map']) if(t is not None): a.textureMap = t a.rgb.assign(*bp['reflectance_90']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['reflectance_90']) r.setAttribute('color.tangential', a) if(bp['transmittance_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['transmittance_map']) if(t is not None): a.textureMap = t a.rgb.assign(*bp['transmittance']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['transmittance']) r.setAttribute('transmittance.color', a) r.setAbsorptionDistance(bp['attenuation_units'], bp['attenuation']) r.setIOR(bp['nd'], bp['abbe']) if(bp['force_fresnel']): r.enableForceFresnel(True) r.setConductor(bp['k']) if(bp['r2_enabled']): r.setFresnelCustom(bp['r2_falloff_angle'], bp['r2_influence'], True, ) # surface if(bp['roughness_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['roughness_map']) if(t is not None): a.textureMap = t a.value = bp['roughness'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['roughness'] b.setAttribute('roughness', a) if(bp['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['bump_map']) if(t is not None): a.textureMap = t if(bp['bump_map_use_normal']): a.value = bp['bump_normal'] else: a.value = bp['bump'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE if(bp['bump_map_use_normal']): a.value = bp['bump_normal'] else: a.value = bp['bump'] b.setAttribute('bump', a) b.setNormalMapState(bp['bump_map_use_normal']) if(bp['anisotropy_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['anisotropy_map']) if(t is not None): a.textureMap = t a.value = bp['anisotropy'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['anisotropy'] b.setAttribute('anisotropy', a) if(bp['anisotropy_angle_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['anisotropy_angle_map']) if(t is not None): a.textureMap = t a.value = bp['anisotropy_angle'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['anisotropy_angle'] b.setAttribute('angle', a) # subsurface a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['scattering']) r.setAttribute('scattering', a) r.setScatteringParameters(bp['coef'], bp['asymmetry'], bp['single_sided']) if(bp['single_sided']): if(bp['single_sided_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['single_sided_map']) if(t is not None): a.textureMap = t a.value = bp['single_sided_value'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['single_sided_value'] r.setScatteringThickness(a) r.setScatteringThicknessRange(bp['single_sided_min'], bp['single_sided_max']) # coating cp = d['coating'] if(cp['enabled']): c = b.addCoating() if(cp['thickness_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(cp['thickness_map']) if(t is not None): a.textureMap = t a.value = cp['thickness'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = 
cp['thickness'] c.setThickness(a) c.setThicknessRange(cp['thickness_map_min'], cp['thickness_map_max']) r = c.getReflectance() if(cp['ior'] == 1): # measured data r.setActiveIorMode(1) r.setComplexIor(cp['complex_ior']) else: if(cp['reflectance_0_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(cp['reflectance_0_map']) if(t is not None): a.textureMap = t a.rgb.assign(*cp['reflectance_0']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*cp['reflectance_0']) r.setAttribute('color', a) if(cp['reflectance_90_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(cp['reflectance_90_map']) if(t is not None): a.textureMap = t a.rgb.assign(*cp['reflectance_90']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*cp['reflectance_90']) r.setAttribute('color.tangential', a) r.setIOR(cp['nd'], 1.0, ) if(cp['force_fresnel']): r.enableForceFresnel(True) r.setConductor(cp['k']) if(cp['r2_enabled']): r.setFresnelCustom(cp['r2_falloff_angle'], 0.0, True, ) def add_emitter(d, l): e = l.createEmitter() if(d['type'] == 0): e.setLobeType(EMISSION_LOBE_DEFAULT) elif(d['type'] == 1): e.setLobeType(EMISSION_LOBE_IES) e.setLobeIES(d['ies_data']) e.setIESLobeIntensity(d['ies_intensity']) elif(d['type'] == 2): e.setLobeType(EMISSION_LOBE_SPOTLIGHT) if(d['spot_map'] is not None): t = self.texture(d['spot_map']) if(t is not None): e.setLobeImageProjectedMap(d['spot_map_enabled'], t) e.setSpotConeAngle(d['spot_cone_angle']) e.setSpotFallOffAngle(d['spot_falloff_angle']) e.setSpotFallOffType(d['spot_falloff_type']) e.setSpotBlur(d['spot_blur']) if(d['emission'] == 0): e.setActiveEmissionType(EMISSION_TYPE_PAIR) ep = CemitterPair() c = Crgb() c.assign(*d['color']) ep.rgb.assign(c) ep.temperature = d['color_black_body'] ep.watts = d['luminance_power'] ep.luminousEfficacy = d['luminance_efficacy'] ep.luminousPower = d['luminance_output'] ep.illuminance = d['luminance_output'] ep.luminousIntensity = d['luminance_output'] ep.luminance = d['luminance_output'] e.setPair(ep) if(d['luminance'] == 0): u = EMISSION_UNITS_WATTS_AND_LUMINOUS_EFFICACY elif(d['luminance'] == 1): u = EMISSION_UNITS_LUMINOUS_POWER elif(d['luminance'] == 2): u = EMISSION_UNITS_ILLUMINANCE elif(d['luminance'] == 3): u = EMISSION_UNITS_LUMINOUS_INTENSITY elif(d['luminance'] == 4): u = EMISSION_UNITS_LUMINANCE if(d['color_black_body_enabled']): e.setActivePair(EMISSION_COLOR_TEMPERATURE, u) else: e.setActivePair(EMISSION_RGB, u) elif(d['emission'] == 1): e.setActiveEmissionType(EMISSION_TYPE_TEMPERATURE) e.setTemperature(d['temperature_value']) elif(d['emission'] == 2): e.setActiveEmissionType(EMISSION_TYPE_MXI) a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['hdr_map']) if(t is not None): a.textureMap = t a.value = d['hdr_intensity'] e.setMXI(a) e.setState(True) def add_layer(d, m): l = m.addLayer() l.setName(d['name']) lpd = d['layer_props'] if(not lpd['visible']): l.setEnabled(False) if(lpd['blending'] == 1): l.setStackedBlendingMode(1) if(lpd['opacity_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(lpd['opacity_map']) if(t is not None): a.textureMap = t a.value = lpd['opacity'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = lpd['opacity'] l.setAttribute('weight', a) epd = d['emitter'] if(epd['enabled']): add_emitter(epd, l) for b in d['bsdfs']: add_bsdf(b, l) global_props(d['global_props'], m) displacement(d['displacement'], m) for layer in d['layers']: add_layer(layer, m) return m def 
material(self, d, ): s = self.mxs if(d['subtype'] == 'EXTERNAL'): if(d['path'] == ''): m = self.material_placeholder(d['name']) else: m = self.material_external(d) if(d['override']): # global properties if(d['override_map']): t = self.texture(d['override_map']) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['bump_map']) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) elif(d['subtype'] == 'EXTENSION'): if(d['use'] == 'EMITTER'): m = s.createMaterial(d['name']) l = m.addLayer() e = l.createEmitter() if(d['emitter_type'] == 0): e.setLobeType(EMISSION_LOBE_DEFAULT) elif(d['emitter_type'] == 1): e.setLobeType(EMISSION_LOBE_IES) e.setLobeIES(d['emitter_ies_data']) e.setIESLobeIntensity(d['emitter_ies_intensity']) elif(d['emitter_type'] == 2): e.setLobeType(EMISSION_LOBE_SPOTLIGHT) if(d['emitter_spot_map'] is not None): t = self.texture(d['emitter_spot_map']) if(t is not None): e.setLobeImageProjectedMap(d['emitter_spot_map_enabled'], t) e.setSpotConeAngle(d['emitter_spot_cone_angle']) e.setSpotFallOffAngle(d['emitter_spot_falloff_angle']) e.setSpotFallOffType(d['emitter_spot_falloff_type']) e.setSpotBlur(d['emitter_spot_blur']) if(d['emitter_emission'] == 0): e.setActiveEmissionType(EMISSION_TYPE_PAIR) ep = CemitterPair() c = Crgb() c.assign(*d['emitter_color']) ep.rgb.assign(c) ep.temperature = d['emitter_color_black_body'] ep.watts = d['emitter_luminance_power'] ep.luminousEfficacy = d['emitter_luminance_efficacy'] ep.luminousPower = d['emitter_luminance_output'] ep.illuminance = d['emitter_luminance_output'] ep.luminousIntensity = d['emitter_luminance_output'] ep.luminance = d['emitter_luminance_output'] e.setPair(ep) if(d['emitter_luminance'] == 0): u = EMISSION_UNITS_WATTS_AND_LUMINOUS_EFFICACY elif(d['emitter_luminance'] == 1): u = EMISSION_UNITS_LUMINOUS_POWER elif(d['emitter_luminance'] == 2): u = EMISSION_UNITS_ILLUMINANCE elif(d['emitter_luminance'] == 3): u = EMISSION_UNITS_LUMINOUS_INTENSITY elif(d['emitter_luminance'] == 4): u = EMISSION_UNITS_LUMINANCE if(d['emitter_color_black_body_enabled']): e.setActivePair(EMISSION_COLOR_TEMPERATURE, u) else: e.setActivePair(EMISSION_RGB, u) elif(d['emitter_emission'] == 1): e.setActiveEmissionType(EMISSION_TYPE_TEMPERATURE) e.setTemperature(d['emitter_temperature_value']) elif(d['emitter_emission'] == 2): e.setActiveEmissionType(EMISSION_TYPE_MXI) a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['emitter_hdr_map']) if(t is not None): a.textureMap = t a.value = d['emitter_hdr_intensity'] e.setMXI(a) e.setState(True) def global_props(d, m): # global properties if(d['override_map']): t = texture(d['override_map'], s, ) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = texture(d['bump_map'], s, ) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) if(d['active_display_map']): t 
= texture(d['active_display_map'], s, ) m.setActiveDisplayMap(t) global_props(d, m) else: m = CextensionManager.instance() m.loadAllExtensions() if(d['use'] == 'AGS'): e = m.createDefaultMaterialModifierExtension('AGS') p = e.getExtensionData() c = Crgb() c.assign(*d['ags_color']) p.setRgb('Color', c) p.setFloat('Reflection', d['ags_reflection']) p.setUInt('Type', d['ags_type']) elif(d['use'] == 'OPAQUE'): e = m.createDefaultMaterialModifierExtension('Opaque') p = e.getExtensionData() p.setByte('Color Type', d['opaque_color_type']) c = Crgb() c.assign(*d['opaque_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['opaque_color_map'], p, ) p.setByte('Shininess Type', d['opaque_shininess_type']) p.setFloat('Shininess', d['opaque_shininess']) self.texture_data_to_mxparams('Shininess Map', d['opaque_shininess_map'], p, ) p.setByte('Roughness Type', d['opaque_roughness_type']) p.setFloat('Roughness', d['opaque_roughness']) self.texture_data_to_mxparams('Roughness Map', d['opaque_roughness_map'], p, ) p.setByte('Clearcoat', d['opaque_clearcoat']) elif(d['use'] == 'TRANSPARENT'): e = m.createDefaultMaterialModifierExtension('Transparent') p = e.getExtensionData() p.setByte('Color Type', d['transparent_color_type']) c = Crgb() c.assign(*d['transparent_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['transparent_color_map'], p, ) p.setFloat('Ior', d['transparent_ior']) p.setFloat('Transparency', d['transparent_transparency']) p.setByte('Roughness Type', d['transparent_roughness_type']) p.setFloat('Roughness', d['transparent_roughness']) self.texture_data_to_mxparams('Roughness Map', d['transparent_roughness_map'], p, ) p.setFloat('Specular Tint', d['transparent_specular_tint']) p.setFloat('Dispersion', d['transparent_dispersion']) p.setByte('Clearcoat', d['transparent_clearcoat']) elif(d['use'] == 'METAL'): e = m.createDefaultMaterialModifierExtension('Metal') p = e.getExtensionData() p.setUInt('IOR', d['metal_ior']) p.setFloat('Tint', d['metal_tint']) p.setByte('Color Type', d['metal_color_type']) c = Crgb() c.assign(*d['metal_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['metal_color_map'], p, ) p.setByte('Roughness Type', d['metal_roughness_type']) p.setFloat('Roughness', d['metal_roughness']) self.texture_data_to_mxparams('Roughness Map', d['metal_roughness_map'], p, ) p.setByte('Anisotropy Type', d['metal_anisotropy_type']) p.setFloat('Anisotropy', d['metal_anisotropy']) self.texture_data_to_mxparams('Anisotropy Map', d['metal_anisotropy_map'], p, ) p.setByte('Angle Type', d['metal_angle_type']) p.setFloat('Angle', d['metal_angle']) self.texture_data_to_mxparams('Angle Map', d['metal_angle_map'], p, ) p.setByte('Dust Type', d['metal_dust_type']) p.setFloat('Dust', d['metal_dust']) self.texture_data_to_mxparams('Dust Map', d['metal_dust_map'], p, ) p.setByte('Perforation Enabled', d['metal_perforation_enabled']) self.texture_data_to_mxparams('Perforation Map', d['metal_perforation_map'], p, ) elif(d['use'] == 'TRANSLUCENT'): e = m.createDefaultMaterialModifierExtension('Translucent') p = e.getExtensionData() p.setFloat('Scale', d['translucent_scale']) p.setFloat('Ior', d['translucent_ior']) p.setByte('Color Type', d['translucent_color_type']) c = Crgb() c.assign(*d['translucent_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['translucent_color_map'], p, ) p.setFloat('Hue Shift', d['translucent_hue_shift']) p.setByte('Invert Hue', d['translucent_invert_hue']) p.setFloat('Vibrance', 
d['translucent_vibrance']) p.setFloat('Density', d['translucent_density']) p.setFloat('Opacity', d['translucent_opacity']) p.setByte('Roughness Type', d['translucent_roughness_type']) p.setFloat('Roughness', d['translucent_roughness']) self.texture_data_to_mxparams('Roughness Map', d['translucent_roughness_map'], p, ) p.setFloat('Specular Tint', d['translucent_specular_tint']) p.setByte('Clearcoat', d['translucent_clearcoat']) p.setFloat('Clearcoat Ior', d['translucent_clearcoat_ior']) elif(d['use'] == 'CARPAINT'): e = m.createDefaultMaterialModifierExtension('Car Paint') p = e.getExtensionData() c = Crgb() c.assign(*d['carpaint_color']) p.setRgb('Color', c) p.setFloat('Metallic', d['carpaint_metallic']) p.setFloat('Topcoat', d['carpaint_topcoat']) elif(d['use'] == 'HAIR'): e = m.createDefaultMaterialModifierExtension('Hair') p = e.getExtensionData() p.setByte('Color Type', d['hair_color_type']) c = Crgb() c.assign(*d['hair_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['hair_color_map'], p, ) self.texture_data_to_mxparams('Root-Tip Map', d['hair_root_tip_map'], p, ) p.setByte('Root-Tip Weight Type', d['hair_root_tip_weight_type']) p.setFloat('Root-Tip Weight', d['hair_root_tip_weight']) self.texture_data_to_mxparams('Root-Tip Weight Map', d['hair_root_tip_weight_map'], p, ) p.setFloat('Primary Highlight Strength', d['hair_primary_highlight_strength']) p.setFloat('Primary Highlight Spread', d['hair_primary_highlight_spread']) c = Crgb() c.assign(*d['hair_primary_highlight_tint']) p.setRgb('Primary Highlight Tint', c) p.setFloat('Secondary Highlight Strength', d['hair_secondary_highlight_strength']) p.setFloat('Secondary Highlight Spread', d['hair_secondary_highlight_spread']) c = Crgb() c.assign(*d['hair_secondary_highlight_tint']) p.setRgb('Secondary Highlight Tint', c) m = s.createMaterial(d['name']) m.applyMaterialModifierExtension(p) # global properties if(d['override_map']): t = self.texture(d['override_map']) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['bump_map']) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) if(d['active_display_map']): t = self.texture(d['active_display_map']) m.setActiveDisplayMap(t) def displacement(d, m): if(not d['enabled']): return m.enableDisplacement(True) if(d['map'] is not None): t = self.texture(d['map']) m.setDisplacementMap(t) m.setDisplacementCommonParameters(d['type'], d['subdivision'], int(d['smoothing']), d['offset'], d['subdivision_method'], d['uv_interpolation'], ) m.setHeightMapDisplacementParameters(d['height'], d['height_units'], d['adaptive'], ) v = Cvector(*d['v3d_scale']) m.setVectorDisplacementParameters(v, d['v3d_transform'], d['v3d_rgb_mapping'], d['v3d_preset'], ) try: displacement(d['displacement'], m) except KeyError: pass elif(d['subtype'] == 'CUSTOM'): m = self.material_custom(d) else: raise TypeError("Material '{}' {} is unknown type".format(d['name'], d['subtype'])) def get_material(self, n, ): """get material by name from scene, if material is missing, create and return placeholder""" def get_material_names(s): it = CmaxwellMaterialIterator() o = it.first(s) l = [] while not o.isNull(): name = o.getName() 
l.append(name) o = it.next() return l s = self.mxs names = get_material_names(s) m = None if(n in names): m = s.getMaterial(n) if(m is None): # should not happen because i stopped changing material names.. but i leave it here m = self.material_placeholder() return m def camera(self, props, steps, active=False, lens_extra=None, response=None, region=None, custom_bokeh=(1.0, 0.0, False), cut_planes=(0.0, 1e7, False), shift_lens=(0.0, 0.0), ): """Create camera. props (string name, int nSteps, float shutter, float filmWidth, float filmHeight, float iso, int diaphragmType, float angle, int nBlades, float fps, int xRes, int yRes, float pixelAspect, int lensType, int projectionType) steps [(int iStep, [3 float] origin, [3 float] focalPoint, [3 float] up, float focalLength, float fStop, bool focalLengthNeedCorrection), ..., ] active bool lens_extra float or None response string or None region (float x1, float y1, float x2, float y2, string type) or None custom_bokeh (float ratio, float angle, bool enabled) or None cut_planes (float near, float far, bool enabled) or None shift_lens (float x, float y) or None """ s = self.mxs if(props[13] in [6, 7]): props2 = list(props[:]) props2[13] = TYPE_EXTENSION_LENS c = s.addCamera(*props2) else: c = s.addCamera(*props) for step in steps: l = list(step[:]) l[1] = Cvector(*l[1]) l[2] = Cvector(*l[2]) l[3] = Cvector(*l[3]) c.setStep(*l) # TYPE_THIN_LENS, TYPE_PINHOLE, TYPE_ORTHO if(lens_extra is not None): if(props[13] == TYPE_FISHEYE_LENS): c.setFishLensProperties(lens_extra) if(props[13] == TYPE_SPHERICAL_LENS): c.setSphericalLensProperties(lens_extra) if(props[13] == TYPE_CYLINDRICAL_LENS): c.setCylindricalLensProperties(lens_extra) if(props[13] == 6): p = MXparamList() p.createString('EXTENSION_NAME', 'Lat-Long Stereo') p.createUInt('Type', lens_extra[0], 0, 2) p.createFloat('FOV Vertical', lens_extra[1], 180.0, 0.0) p.createFloat('FOV Horizontal', lens_extra[2], 360.0, 0.0) p.createByte('Flip Ray X', lens_extra[3], 0, 1) p.createByte('Flip Ray Y', lens_extra[4], 0, 1) p.createFloat('Parallax Distance', lens_extra[5], 0.0, 360.0) p.createByte('Zenith Mode', lens_extra[6], 0, 1) p.createFloat('Separation', lens_extra[7], 0.0, 100000.0) p.createTextureMap('Separation Map', CtextureMap()) self.texture_data_to_mxparams('Separation Map', lens_extra[8], p, ) c.applyCameraLensExtension(p) if(props[13] == 7): p = MXparamList() p.createString('EXTENSION_NAME', 'Fish Stereo') p.createUInt('Type', lens_extra[0], 0, 2) p.createFloat('FOV', lens_extra[1], 0.0, 360.0) p.createFloat('Separation', lens_extra[2], 0.0, 1000000.0) p.createTextureMap('Separation Map', CtextureMap()) self.texture_data_to_mxparams('Separation Map', lens_extra[3], p, ) p.createByte('Vertical Mode', lens_extra[4], 0, 1) p.createFloat('Dome Radius', lens_extra[5], 1.0, 1000000.0) p.createTextureMap('Turn Map', CtextureMap()) self.texture_data_to_mxparams('Turn Map', lens_extra[6], p, ) p.createByte('Dome Tilt Compensation', lens_extra[7], 0, 1) p.createFloat('Dome Tilt', lens_extra[8], 0.0, 90.0) p.createTextureMap('Tilt Map', CtextureMap()) self.texture_data_to_mxparams('Tilt Map', lens_extra[9], p, ) c.applyCameraLensExtension(p) if(response is not None): c.setCameraResponsePreset(response) if(custom_bokeh is not None): c.setCustomBokeh(*custom_bokeh) if(cut_planes is not None): c.setCutPlanes(*cut_planes) if(shift_lens is not None): c.setShiftLens(*shift_lens) if(region is not None): region = list(region) # the docstring allows a tuple; make it mutable before the one-pixel clamp below if(region[2] == props[3]): region[2] -= 1 if(region[3] == props[4]): region[3] -= 1 c.setScreenRegion(*region)
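# activate the camera only after all optional lens, bokeh, cut-plane and region settings above have been applied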
if(active): c.setActive() return c def empty(self, name, matrix, motion, object_props=None, ): """Create empty object. name string matrix (((3 float), (3 float), (3 float), (3 float)), ((3 float), (3 float), (3 float), (3 float)), (3 float), (3 float), (3 float)) - base, pivot, location, rotation, scale object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None """ s = self.mxs o = s.createMesh(name, 0, 0, 0, 0, ) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) return o def mesh(self, name, matrix, motion, num_positions, vertices, normals, triangles, triangle_normals, uv_channels, object_props=None, num_materials=0, materials=[], triangle_materials=None, backface_material=None, ): """Create mesh object. name string base ((3 float), (3 float), (3 float), (3 float)) pivot ((3 float), (3 float), (3 float), (3 float)) num_positions int vertices [[(float x, float y, float z), ..., ], [...], ] normals [[(float x, float y, float z), ..., ], [...], ] triangles [(int iv0, int iv1, int iv2, int in0, int in1, int in2, ), ..., ], ] # (3x vertex index, 3x normal index) triangle_normals [[(float x, float y, float z), ..., ], [...], ] uv_channels [[(float u1, float v1, float w1, float u2, float v2, float w2, float u3, float v3, float w3, ), ..., ], ..., ] or None # ordered by uv index and ordered by triangle index num_materials int object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None materials [(string path, bool embed), ..., ] or None triangle_materials [(int tri_id, int mat_id), ..., ] or None backface_material (string path, bool embed) or None """ s = self.mxs o = s.createMesh(name, len(vertices[0]), len(normals[0]) + len(triangle_normals[0]), len(triangles), num_positions) if(uv_channels is not None): for i in range(len(uv_channels)): o.addChannelUVW(i) # an = 0 for ip in range(num_positions): an = 0 verts = vertices[ip] norms = normals[ip] for i, loc in enumerate(verts): o.setVertex(i, ip, Cvector(*loc), ) o.setNormal(i, ip, Cvector(*norms[i]), ) an += 1 for ip in range(num_positions): trinorms = triangle_normals[ip] for i, nor in enumerate(trinorms): o.setNormal(an + i, ip, Cvector(*nor), ) if(type(triangles) is not list): # pymaxwell does not like numpy arrays.. Cvectors has no problems, but setTriangle does.. triangles = triangles.tolist() for i, tri in enumerate(triangles): o.setTriangle(i, *tri) if(uv_channels is not None): for iuv, uv in enumerate(uv_channels): for it, t in enumerate(uv): o.setTriangleUVW(it, iuv, *t) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) if(materials is not None): if(num_materials > 1): # multi material mats = [] for i in range(num_materials): try: n = materials[i] mat = self.get_material(n) except: mat = self.material_placeholder() mats.append(mat) # pymaxwell does not like numpy arrays.. 
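# (same workaround as for 'triangles' above: convert a possible numpy array to a plain Python list first)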
if(type(triangle_materials) is not list): triangle_materials = triangle_materials.tolist() for tid, mid in triangle_materials: o.setTriangleMaterial(tid, mats[mid]) else: # single material if(len(materials) == 1): if(materials[0] != ''): mat = self.get_material(materials[0]) o.setMaterial(mat) else: # no material pass if(backface_material is not None): if(backface_material != ''): # only single backface material mat = self.get_material(backface_material) o.setBackfaceMaterial(mat) return o def instance(self, name, instanced_name, matrix, motion=None, object_props=None, materials=None, backface_material=None, ): """Create instance of mesh object. Instanced object must exist in scene. name string instanced_name string base ((3 float), (3 float), (3 float), (3 float)) pivot ((3 float), (3 float), (3 float), (3 float)) object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None material (string path, bool embed) or None backface_material (string path, bool embed) or None """ s = self.mxs bo = s.getObject(instanced_name) o = s.createInstancement(name, bo) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) if(materials is not None): if(len(materials) > 1): # multi material instances inherits material from base object pass else: # single material, and i think (not sure) i can't make instance with different material than base in blender.. if(len(materials) == 1): if(materials[0] != ''): mat = self.get_material(materials[0]) o.setMaterial(mat) if(backface_material is not None): if(backface_material != ''): mat = self.get_material(backface_material) o.setBackfaceMaterial(mat) return o def reference(self, name, path, flags, matrix, motion=None, object_props=None, material=None, backface_material=None, ): """Create MXS reference object. name string path string (path) flags [bool, bool, bool, bool] base ((3 float), (3 float), (3 float), (3 float)) pivot ((3 float), (3 float), (3 float), (3 float)) object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None """ s = self.mxs o = s.createMesh(name, 0, 0, 0, 0, ) o.setReferencedScenePath(path) if(flags[0]): o.setReferencedOverrideFlags(FLAG_OVERRIDE_HIDE) if(flags[1]): o.setReferencedOverrideFlags(FLAG_OVERRIDE_HIDE_TO_CAMERA) if(flags[2]): o.setReferencedOverrideFlags(FLAG_OVERRIDE_HIDE_TO_REFL_REFR) if(flags[3]): o.setReferencedOverrideFlags(FLAG_OVERRIDE_HIDE_TO_GI) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) if(material is not None): if(material != ''): mat = self.get_material(material) o.setMaterial(mat) if(backface_material is not None): if(backface_material != ''): mat = self.get_material(backface_material) o.setBackfaceMaterial(mat) return o def hierarchy(self, tree, ): """Set hierarchy of all objects at once. tree [(obj_name, parent_name or None, ), ..., ] """ s = self.mxs for on, pn, _ in tree: if(pn is not None): o = s.getObject(on) p = s.getObject(pn) o.setParent(p) def environment(self, env_type=None, sky_type=None, sky=None, dome=None, sun_type=None, sun=None, ibl=None, ): """Set Environment properties. 
env_type string or None PHYSICAL_SKY, IMAGE_BASED, NONE sky_type string or None PHYSICAL, CONSTANT sky dict or None {sky_use_preset bool sky_preset string (path) sky_intensity float sky_planet_refl float sky_ozone float sky_water float sky_turbidity_coeff float sky_wavelength_exp float sky_reflectance float sky_asymmetry float} dome dict or None {dome_intensity float dome_zenith [float, float, float] dome_horizon [float, float, float] dome_mid_point float} sun_type string or None DISABLED, PHYSICAL, CUSTOM sun dict or None {sun_power float sun_radius_factor float sun_temp float sun_color [float, float, float] sun_location_type string LATLONG, ANGLES, DIRECTION sun_latlong_lat float sun_latlong_lon float sun_date string sun_time string sun_latlong_gmt int sun_latlong_gmt_auto bool sun_latlong_ground_rotation float sun_angles_zenith float sun_angles_azimuth float sun_dir_x float sun_dir_y float sun_dir_z float} ibl dict or None {ibl_intensity float ibl_interpolation bool ibl_screen_mapping bool ibl_bg_type string HDR_IMAGE, ACTIVE_SKY, DISABLED ibl_bg_map string (path) ibl_bg_intensity float ibl_bg_scale_x float ibl_bg_scale_y float ibl_bg_offset_x float ibl_bg_offset_y float ibl_refl_type string HDR_IMAGE, ACTIVE_SKY, DISABLED ibl_refl_map string (path) ibl_refl_intensity float ibl_refl_scale_x float ibl_refl_scale_y float ibl_refl_offset_x float ibl_refl_offset_y float ibl_refr_type string HDR_IMAGE, ACTIVE_SKY, DISABLED ibl_refr_map string (path) ibl_refr_intensity float ibl_refr_scale_x float ibl_refr_scale_y float ibl_refr_offset_x float ibl_refr_offset_y float ibl_illum_type string HDR_IMAGE, ACTIVE_SKY, DISABLED ibl_illum_map string (path) ibl_illum_intensity float ibl_illum_scale_x float ibl_illum_scale_y float ibl_illum_offset_x float ibl_illum_offset_y float} """ s = self.mxs env = s.getEnvironment() if(env_type == 'PHYSICAL_SKY' or env_type == 'IMAGE_BASED'): if(sky_type is not None): env.setActiveSky(sky_type) if(sky_type == 'PHYSICAL'): if(not sky["sky_use_preset"]): env.setPhysicalSkyAtmosphere(sky["sky_intensity"], sky["sky_ozone"], sky["sky_water"], sky["sky_turbidity_coeff"], sky["sky_wavelength_exp"], sky["sky_reflectance"], sky["sky_asymmetry"], sky["sky_planet_refl"], ) else: env.loadSkyFromPreset(sky["sky_preset"]) elif(sky_type == 'CONSTANT'): hc = Crgb() hc.assign(*dome['dome_horizon']) zc = Crgb() zc.assign(*dome['dome_zenith']) env.setSkyConstant(dome["dome_intensity"], hc, zc, dome['dome_mid_point']) sc = Crgb() sc.assign(*sun['sun_color']) if(sun_type == 'PHYSICAL'): env.setSunProperties(SUN_PHYSICAL, sun["sun_temp"], sun["sun_power"], sun["sun_radius_factor"], sc) elif(sun_type == 'CUSTOM'): env.setSunProperties(SUN_CONSTANT, sun["sun_temp"], sun["sun_power"], sun["sun_radius_factor"], sc) else: # sun_type == 'DISABLED' or sun_type == None env.setSunProperties(SUN_DISABLED, sun["sun_temp"], sun["sun_power"], sun["sun_radius_factor"], sc) if(sun['sun_location_type'] == 'LATLONG'): env.setSunPositionType(0) l = sun["sun_date"].split(".") date = datetime.date(int(l[2]), int(l[1]), int(l[0])) day = int(date.timetuple().tm_yday) l = sun["sun_time"].split(":") hour = int(l[0]) minute = int(l[1]) time = hour + (minute / 60) env.setSunLongitudeAndLatitude(sun["sun_latlong_lon"], sun["sun_latlong_lat"], sun["sun_latlong_gmt"], day, time) env.setSunRotation(sun["sun_latlong_ground_rotation"]) elif(sun['sun_location_type'] == 'ANGLES'): env.setSunPositionType(1) env.setSunAngles(sun["sun_angles_zenith"], sun["sun_angles_azimuth"]) elif(sun['sun_location_type'] == 
'DIRECTION'): env.setSunPositionType(2) env.setSunDirection(Cvector(sun["sun_dir_x"], sun["sun_dir_y"], sun["sun_dir_z"])) if(env_type == 'IMAGE_BASED'): env.enableEnvironment(True) def state(v): # channel state: 0 = Disabled; 1 = Enabled; 2 = Use active sky instead. if(v == 'HDR_IMAGE'): return 1 if(v == 'ACTIVE_SKY'): return 2 if(v == 'SAME_AS_BG'): # same as bg, set the same values as in bg layer return 3 return 0 if(ibl is not None): env.setEnvironmentWeight(ibl["ibl_intensity"]) st = state(ibl["ibl_bg_type"]) # named 'st', not 's', so the scene handle above is not shadowed env.setEnvironmentLayer(IBL_LAYER_BACKGROUND, ibl["ibl_bg_map"], st, not ibl["ibl_screen_mapping"], not ibl["ibl_interpolation"], ibl["ibl_bg_intensity"], ibl["ibl_bg_scale_x"], ibl["ibl_bg_scale_y"], ibl["ibl_bg_offset_x"], ibl["ibl_bg_offset_y"], ) st = state(ibl["ibl_refl_type"]) if(st == 3): st = state(ibl["ibl_bg_type"]) env.setEnvironmentLayer(IBL_LAYER_REFLECTION, ibl["ibl_bg_map"], st, not ibl["ibl_screen_mapping"], not ibl["ibl_interpolation"], ibl["ibl_bg_intensity"], ibl["ibl_bg_scale_x"], ibl["ibl_bg_scale_y"], ibl["ibl_bg_offset_x"], ibl["ibl_bg_offset_y"], ) else: env.setEnvironmentLayer(IBL_LAYER_REFLECTION, ibl["ibl_refl_map"], st, not ibl["ibl_screen_mapping"], not ibl["ibl_interpolation"], ibl["ibl_refl_intensity"], ibl["ibl_refl_scale_x"], ibl["ibl_refl_scale_y"], ibl["ibl_refl_offset_x"], ibl["ibl_refl_offset_y"], ) st = state(ibl["ibl_refr_type"]) if(st == 3): st = state(ibl["ibl_bg_type"]) env.setEnvironmentLayer(IBL_LAYER_REFRACTION, ibl["ibl_bg_map"], st, not ibl["ibl_screen_mapping"], not ibl["ibl_interpolation"], ibl["ibl_bg_intensity"], ibl["ibl_bg_scale_x"], ibl["ibl_bg_scale_y"], ibl["ibl_bg_offset_x"], ibl["ibl_bg_offset_y"], ) else: env.setEnvironmentLayer(IBL_LAYER_REFRACTION, ibl["ibl_refr_map"], st, not ibl["ibl_screen_mapping"], not ibl["ibl_interpolation"], ibl["ibl_refr_intensity"], ibl["ibl_refr_scale_x"], ibl["ibl_refr_scale_y"], ibl["ibl_refr_offset_x"], ibl["ibl_refr_offset_y"], ) st = state(ibl["ibl_illum_type"]) if(st == 3): st = state(ibl["ibl_bg_type"]) env.setEnvironmentLayer(IBL_LAYER_ILLUMINATION, ibl["ibl_bg_map"], st, not ibl["ibl_screen_mapping"], not ibl["ibl_interpolation"], ibl["ibl_bg_intensity"], ibl["ibl_bg_scale_x"], ibl["ibl_bg_scale_y"], ibl["ibl_bg_offset_x"], ibl["ibl_bg_offset_y"], ) else: env.setEnvironmentLayer(IBL_LAYER_ILLUMINATION, ibl["ibl_illum_map"], st, not ibl["ibl_screen_mapping"], not ibl["ibl_interpolation"], ibl["ibl_illum_intensity"], ibl["ibl_illum_scale_x"], ibl["ibl_illum_scale_y"], ibl["ibl_illum_offset_x"], ibl["ibl_illum_offset_y"], ) else: # env_type == 'NONE' or env_type == None env.setActiveSky('') def parameters(self, scene, materials=None, generals=None, tone=None, simulens=None, illum_caustics=None, other=None, text_overlay=None, ): """Set scene render parameters.
scene dict {cpu_threads int, multilight int, multilight_type int, quality string RS1, RS0 sampling_level float, time int, }, materials dict {override bool, override_path string (path), search_path string (path), } or None generals dict {diplacement bool, dispersion bool, motion_blur bool, } or None tone dict {burn float, color_space int, gamma float, sharpness bool, sharpness_value float, tint float, whitepoint float, } or None simulens dict {aperture_map string (path), devignetting bool, devignetting_value float, diffraction bool, diffraction_value float, frequency float, obstacle_map string (path), scattering bool, scattering_value float, } or None illum_caustics dict {illumination int, refl_caustics int, refr_caustics int, } or None other dict {protect bool, } """ s = self.mxs # s.setRenderParameter('ENGINE', scene["quality"]) s.setRenderParameter('ENGINE', bytes(scene["quality"], encoding='UTF-8')) s.setRenderParameter('NUM THREADS', scene["cpu_threads"]) s.setRenderParameter('STOP TIME', scene["time"] * 60) s.setRenderParameter('SAMPLING LEVEL', scene["sampling_level"]) s.setRenderParameter('USE MULTILIGHT', scene["multilight"]) s.setRenderParameter('SAVE LIGHTS IN SEPARATE FILES', scene["multilight_type"]) if(generals is not None): s.setRenderParameter('DO MOTION BLUR', generals["motion_blur"]) s.setRenderParameter('DO DISPLACEMENT', generals["diplacement"]) s.setRenderParameter('DO DISPERSION', generals["dispersion"]) if(illum_caustics is not None): v = illum_caustics['illumination'] if(v == 3): s.setRenderParameter('DO DIRECT LAYER', 0) s.setRenderParameter('DO INDIRECT LAYER', 0) elif(v == 2): s.setRenderParameter('DO DIRECT LAYER', 0) s.setRenderParameter('DO INDIRECT LAYER', 1) elif(v == 1): s.setRenderParameter('DO DIRECT LAYER', 1) s.setRenderParameter('DO INDIRECT LAYER', 0) else: s.setRenderParameter('DO DIRECT LAYER', 1) s.setRenderParameter('DO INDIRECT LAYER', 1) v = illum_caustics['refl_caustics'] if(v == 3): s.setRenderParameter('DO DIRECT REFLECTION CAUSTIC LAYER', 0) s.setRenderParameter('DO INDIRECT REFLECTION CAUSTIC LAYER', 0) elif(v == 2): s.setRenderParameter('DO DIRECT REFLECTION CAUSTIC LAYER', 0) s.setRenderParameter('DO INDIRECT REFLECTION CAUSTIC LAYER', 1) elif(v == 1): s.setRenderParameter('DO DIRECT REFLECTION CAUSTIC LAYER', 1) s.setRenderParameter('DO INDIRECT REFLECTION CAUSTIC LAYER', 0) else: s.setRenderParameter('DO DIRECT REFLECTION CAUSTIC LAYER', 1) s.setRenderParameter('DO INDIRECT REFLECTION CAUSTIC LAYER', 1) v = illum_caustics['refr_caustics'] if(v == 3): s.setRenderParameter('DO DIRECT REFRACTION CAUSTIC LAYER', 0) s.setRenderParameter('DO INDIRECT REFRACTION CAUSTIC LAYER', 0) elif(v == 2): s.setRenderParameter('DO DIRECT REFRACTION CAUSTIC LAYER', 0) s.setRenderParameter('DO INDIRECT REFRACTION CAUSTIC LAYER', 1) elif(v == 1): s.setRenderParameter('DO DIRECT REFRACTION CAUSTIC LAYER', 1) s.setRenderParameter('DO INDIRECT REFRACTION CAUSTIC LAYER', 0) else: s.setRenderParameter('DO DIRECT REFRACTION CAUSTIC LAYER', 1) s.setRenderParameter('DO INDIRECT REFRACTION CAUSTIC LAYER', 1) if(simulens is not None): s.setRenderParameter('DO DEVIGNETTING', simulens["devignetting"]) s.setRenderParameter('DEVIGNETTING', simulens["devignetting_value"]) s.setRenderParameter('DO SCATTERING_LENS', simulens["scattering"]) s.setRenderParameter('SCATTERING_LENS', simulens["scattering_value"]) if(simulens["diffraction"]): s.enableDiffraction() s.setDiffraction(simulens["diffraction_value"], simulens["frequency"], simulens["aperture_map"], 
simulens["obstacle_map"]) if(tone is not None): s.setRenderParameter('DO SHARPNESS', tone["sharpness"]) s.setRenderParameter('SHARPNESS', tone["sharpness_value"]) s.setToneMapping(tone["gamma"], tone["burn"]) s.setColorSpace(tone["color_space"]) s.setWhitePoint(tone["whitepoint"], tone["tint"]) if(materials is not None): if(materials["override"]): s.setOverrideMaterial(True) if(materials["override_path"] != ""): s.setOverrideMaterial(materials["override_path"]) if(materials["search_path"] != ""): s.addSearchingPath(materials["search_path"]) if(materials["default_material"] != ""): s.setDefaultMaterial(True) s.setDefaultMaterial(materials["default_material"]) else: s.setDefaultMaterial(False) if(other is not None): if(other['protect']): s.enableProtection(True) else: s.enableProtection(False) if(other['extra_sampling_enabled']): s.setRenderParameter('DO EXTRA SAMPLING', 1) s.setRenderParameter('EXTRA SAMPLING SL', other['extra_sampling_sl']) s.setRenderParameter('EXTRA SAMPLING MASK', other['extra_sampling_mask']) if(platform.system() == 'Linux'): # wtf? s.setRenderParameter('EXTRA SAMPLING CUSTOM ALPHA', bytes(other['extra_sampling_custom_alpha'], encoding='UTF-8')) s.setRenderParameter('EXTRA SAMPLING USER BITMAP', bytes(other['extra_sampling_user_bitmap'], encoding='UTF-8')) else: s.setRenderParameter('EXTRA SAMPLING CUSTOM ALPHA', other['extra_sampling_custom_alpha']) s.setRenderParameter('EXTRA SAMPLING USER BITMAP', other['extra_sampling_user_bitmap']) if(other['extra_sampling_invert']): s.setRenderParameter('EXTRA SAMPLING INVERT', 1) if(text_overlay is not None): if(text_overlay['enabled']): o = CoverlayTextOptions() o.enabled_ = 1 o.text_ = Cstring(text_overlay['text']) o.position_ = text_overlay['position'] c = Crgb() c.assign(*text_overlay['color']) o.color_ = c.toRGB8() o.backgroundEnabled_ = text_overlay['background'] c = Crgb() c.assign(*text_overlay['background_color']) o.backgroundColor_ = c.toRGB8() s.setOverlayTextOptions(o) def channels(self, base_path, mxi, image, image_depth='RGB8', channels_output_mode=0, channels_render=True, channels_render_type=0, channels=None, ): """Set scene render channels. 
base_path string (path) mxi string (path) image string (path) image_depth string RGB8, RGB16, RGB32 channels_output_mode int channels_render bool channels_render_type int channels dict {channels_alpha bool channels_alpha_file string channels_alpha_opaque bool channels_custom_alpha bool channels_custom_alpha_file string channels_deep bool channels_deep_file string channels_deep_max_samples int channels_deep_min_dist float channels_deep_type int channels_fresnel bool channels_fresnel_file string channels_material_id bool channels_material_id_file string channels_motion_vector bool channels_motion_vector_file string channels_normals bool channels_normals_file string channels_normals_space int channels_object_id bool channels_object_id_file string channels_position bool channels_position_file string channels_position_space int channels_roughness bool channels_roughness_file string channels_shadow bool channels_shadow_file string channels_uv bool channels_uv_file string channels_z_buffer bool channels_z_buffer_far float channels_z_buffer_file string channels_z_buffer_near float} or None """ def get_ext_depth(t, e=None): if(e is not None): t = "{}{}".format(e[1:].upper(), int(t[3:])) if(t == 'RGB8'): return ('.tif', 8) elif(t == 'RGB16'): return ('.tif', 16) elif(t == 'RGB32'): return ('.tif', 32) elif(t == 'PNG8'): return ('.png', 8) elif(t == 'PNG16'): return ('.png', 16) elif(t == 'TGA'): return ('.tga', 8) elif(t == 'TIF8'): return ('.tif', 8) elif(t == 'TIF16'): return ('.tif', 16) elif(t == 'TIF32'): return ('.tif', 32) elif(t == 'EXR16'): return ('.exr', 16) elif(t == 'EXR32'): return ('.exr', 32) elif(t == 'EXR_DEEP'): return ('.exr', 32) elif(t == 'JPG'): return ('.jpg', 8) elif(t == 'JP2'): return ('.jp2', 8) elif(t == 'HDR'): return ('.hdr', 32) elif(t == 'DTEX'): return ('.dtex', 32) elif(t == 'PSD8'): return ('.psd', 8) elif(t == 'PSD16'): return ('.psd', 16) elif(t == 'PSD32'): return ('.psd', 32) else: return ('.tif', 8) s = self.mxs s.setRenderParameter('DO NOT SAVE MXI FILE', (mxi is None)) s.setRenderParameter('DO NOT SAVE IMAGE FILE', (image is None)) if(mxi is not None): # s.setRenderParameter('MXI FULLNAME', mxi) # s.setRenderParameter('MXI FULLNAME', bytes(mxi, encoding='UTF-8')) if(platform.system() == 'Linux'): # wtf? 
s.setRenderParameter('MXI FULLNAME', bytes(mxi, encoding='UTF-8')) else: # s.setRenderParameter('MXI FULLNAME', mxi) s.setRenderParameter('MXI FULLNAME', bytes(mxi, encoding='UTF-8')) if(image is not None): if(image_depth is None): image_depth = 'RGB8' _, depth = get_ext_depth(image_depth, os.path.splitext(os.path.split(image)[1])[1]) s.setPath('RENDER', image, depth) s.setRenderParameter('DO RENDER CHANNEL', int(channels_render)) s.setRenderParameter('EMBED CHANNELS', channels_output_mode) s.setRenderParameter('RENDER LAYERS', channels_render_type) if(channels is not None): e, depth = get_ext_depth(channels["channels_alpha_file"]) s.setPath('ALPHA', "{}_alpha{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_shadow_file"]) s.setPath('SHADOW', "{}_shadow{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_object_id_file"]) s.setPath('OBJECT', "{}_object_id{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_material_id_file"]) s.setPath('MATERIAL', "{}_material_id{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_motion_vector_file"]) s.setPath('MOTION', "{}_motion_vector{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_z_buffer_file"]) s.setPath('Z', "{}_z_buffer{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_roughness_file"]) s.setPath('ROUGHNESS', "{}_roughness{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_fresnel_file"]) s.setPath('FRESNEL', "{}_fresnel{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_normals_file"]) s.setPath('NORMALS', "{}_normals{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_position_file"]) s.setPath('POSITION', "{}_position{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_deep_file"]) s.setPath('DEEP', "{}_deep{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_uv_file"]) s.setPath('UV', "{}_uv{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_custom_alpha_file"]) s.setPath('ALPHA_CUSTOM', "{}_custom_alpha{}".format(base_path, e), depth) e, depth = get_ext_depth(channels["channels_reflectance_file"]) s.setPath('REFLECTANCE', "{}_reflectance{}".format(base_path, e), depth) s.setRenderParameter('DO ALPHA CHANNEL', int(channels["channels_alpha"])) s.setRenderParameter('OPAQUE ALPHA', int(channels["channels_alpha_opaque"])) s.setRenderParameter('DO IDOBJECT CHANNEL', int(channels["channels_object_id"])) s.setRenderParameter('DO IDMATERIAL CHANNEL', int(channels["channels_material_id"])) s.setRenderParameter('DO SHADOW PASS CHANNEL', int(channels["channels_shadow"])) s.setRenderParameter('DO MOTION CHANNEL', int(channels["channels_motion_vector"])) s.setRenderParameter('DO ROUGHNESS CHANNEL', int(channels["channels_roughness"])) s.setRenderParameter('DO FRESNEL CHANNEL', int(channels["channels_fresnel"])) s.setRenderParameter('DO NORMALS CHANNEL', int(channels["channels_normals"])) s.setRenderParameter('NORMALS CHANNEL SPACE', channels["channels_normals_space"]) s.setRenderParameter('POSITION CHANNEL SPACE', channels["channels_position_space"]) s.setRenderParameter('DO POSITION CHANNEL', int(channels["channels_position"])) s.setRenderParameter('DO ZBUFFER CHANNEL', int(channels["channels_z_buffer"])) s.setRenderParameter('ZBUFFER RANGE', (channels["channels_z_buffer_near"], channels["channels_z_buffer_far"])) s.setRenderParameter('DO DEEP CHANNEL', 
int(channels["channels_deep"])) s.setRenderParameter('DEEP CHANNEL TYPE', channels["channels_deep_type"]) s.setRenderParameter('DEEP MIN DISTANCE', channels["channels_deep_min_dist"]) s.setRenderParameter('DEEP MAX SAMPLES', channels["channels_deep_max_samples"]) s.setRenderParameter('DO UV CHANNEL', int(channels["channels_uv"])) # s.setRenderParameter('MOTION CHANNEL TYPE', ?) s.setRenderParameter('DO ALPHA CUSTOM CHANNEL', int(channels["channels_custom_alpha"])) s.setRenderParameter('DO REFLECTANCE CHANNEL', int(channels["channels_reflectance"])) def custom_alphas(self, groups, ): """Set custom alphas. groups list of dicts: {'name': string, 'objects': list of strings, 'opaque': bool, } """ s = self.mxs def get_material_names(s): it = CmaxwellMaterialIterator() o = it.first(s) l = [] while not o.isNull(): name = o.getName() l.append(name) o = it.next() return l def get_object_names(s): it = CmaxwellObjectIterator() o = it.first(s) l = [] while not o.isNull(): name, _ = o.getName() l.append(name) o = it.next() return l sobs = get_object_names(s) smats = get_material_names(s) for a in groups: s.createCustomAlphaChannel(a['name'], a['opaque']) for n in a['objects']: if(n in sobs): o = s.getObject(n) o.addToCustomAlpha(a['name']) for n in a['materials']: if(n in smats): m = s.getMaterial(n) m.addToCustomAlpha(a['name']) def ext_particles(self, name, properties, matrix, motion=None, object_props=None, material=None, backface_material=None, ): """Create particles object. name string properties dict base ((3 float), (3 float), (3 float), (3 float)) pivot ((3 float), (3 float), (3 float), (3 float)) object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None material (string path, bool embed) or None backface_material (string path, bool embed) or None """ s = self.mxs e = self.mgr.createDefaultGeometryProceduralExtension('MaxwellParticles') p = e.getExtensionData() d = properties if(d['embed'] is True): c = Cbase() c.origin = Cvector(0.0, 0.0, 0.0) c.xAxis = Cvector(1.0, 0.0, 0.0) c.yAxis = Cvector(0.0, 1.0, 0.0) c.zAxis = Cvector(0.0, 0.0, 1.0) p.setFloatArray('PARTICLE_POSITIONS', list(d['pdata']['PARTICLE_POSITIONS']), c) p.setFloatArray('PARTICLE_SPEEDS', list(d['pdata']['PARTICLE_SPEEDS']), c) p.setFloatArray('PARTICLE_RADII', list(d['pdata']['PARTICLE_RADII']), c) p.setIntArray('PARTICLE_IDS', list(d['pdata']['PARTICLE_IDS'])) p.setFloatArray('PARTICLE_NORMALS', list(d['pdata']['PARTICLE_NORMALS']), c) p.setFloatArray('PARTICLE_UVW', list(d['pdata']['PARTICLE_UVW']), c) else: p.setString('FileName', d['filename']) p.setFloat('Radius Factor', d['radius_multiplier']) p.setFloat('MB Factor', d['motion_blur_multiplier']) p.setFloat('Shutter 1/', d['shutter_speed']) p.setFloat('Load particles %', d['load_particles']) p.setUInt('Axis', d['axis_system']) p.setInt('Frame#', d['frame_number']) p.setFloat('fps', d['fps']) p.setInt('Create N particles per particle', d['extra_create_np_pp']) p.setFloat('Extra particles dispersion', d['extra_dispersion']) p.setFloat('Extra particles deformation', d['extra_deformation']) p.setByte('Load particle Force', d['load_force']) p.setByte('Load particle Vorticity', d['load_vorticity']) p.setByte('Load particle Normal', d['load_normal']) p.setByte('Load particle neighbors no.', d['load_neighbors_num']) p.setByte('Load particle UV', d['load_uv']) p.setByte('Load particle Age', d['load_age']) p.setByte('Load particle Isolation Time', d['load_isolation_time']) p.setByte('Load particle Viscosity', 
d['load_viscosity']) p.setByte('Load particle Density', d['load_density']) p.setByte('Load particle Pressure', d['load_pressure']) p.setByte('Load particle Mass', d['load_mass']) p.setByte('Load particle Temperature', d['load_temperature']) p.setByte('Load particle ID', d['load_id']) p.setFloat('Min Force', d['min_force']) p.setFloat('Max Force', d['max_force']) p.setFloat('Min Vorticity', d['min_vorticity']) p.setFloat('Max Vorticity', d['max_vorticity']) p.setInt('Min Nneighbors', d['min_nneighbors']) p.setInt('Max Nneighbors', d['max_nneighbors']) p.setFloat('Min Age', d['min_age']) p.setFloat('Max Age', d['max_age']) p.setFloat('Min Isolation Time', d['min_isolation_time']) p.setFloat('Max Isolation Time', d['max_isolation_time']) p.setFloat('Min Viscosity', d['min_viscosity']) p.setFloat('Max Viscosity', d['max_viscosity']) p.setFloat('Min Density', d['min_density']) p.setFloat('Max Density', d['max_density']) p.setFloat('Min Pressure', d['min_pressure']) p.setFloat('Max Pressure', d['max_pressure']) p.setFloat('Min Mass', d['min_mass']) p.setFloat('Max Mass', d['max_mass']) p.setFloat('Min Temperature', d['min_temperature']) p.setFloat('Max Temperature', d['max_temperature']) p.setFloat('Min Velocity', d['min_velocity']) p.setFloat('Max Velocity', d['max_velocity']) o = s.createGeometryProceduralObject(name, p) a, _ = o.addChannelUVW() o.generateCustomUVW(0, a) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) if(material is not None): if(material != ''): mat = self.get_material(material) o.setMaterial(mat) if(backface_material is not None): if(backface_material != ''): mat = self.get_material(backface_material) o.setBackfaceMaterial(mat) return o def ext_hair(self, name, extension, matrix, motion, root_radius, tip_radius, data, object_props=None, display_percent=10, display_max=1000, material=None, backface_material=None, ): """Create hair/grass object. name string extension string ('MaxwellHair' ,'MGrassP') base ((3 float), (3 float), (3 float), (3 float)) pivot ((3 float), (3 float), (3 float), (3 float)) root_radius float tip_radius float data dict of extension data object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None display_percent int display_max int material (string path, bool embed) or None backface_material (string path, bool embed) or None """ s = self.mxs e = self.mgr.createDefaultGeometryProceduralExtension(extension) p = e.getExtensionData() p.setByteArray('HAIR_MAJOR_VER', data['HAIR_MAJOR_VER']) p.setByteArray('HAIR_MINOR_VER', data['HAIR_MINOR_VER']) p.setByteArray('HAIR_FLAG_ROOT_UVS', data['HAIR_FLAG_ROOT_UVS']) m = memoryview(struct.pack("I", data['HAIR_GUIDES_COUNT'][0])).tolist() p.setByteArray('HAIR_GUIDES_COUNT', m) m = memoryview(struct.pack("I", data['HAIR_GUIDES_POINT_COUNT'][0])).tolist() p.setByteArray('HAIR_GUIDES_POINT_COUNT', m) c = Cbase() c.origin = Cvector(0.0, 0.0, 0.0) c.xAxis = Cvector(1.0, 0.0, 0.0) c.yAxis = Cvector(0.0, 1.0, 0.0) c.zAxis = Cvector(0.0, 0.0, 1.0) p.setFloatArray('HAIR_POINTS', list(data['HAIR_POINTS']), c) p.setFloatArray('HAIR_NORMALS', list(data['HAIR_NORMALS']), c) if(data['HAIR_FLAG_ROOT_UVS'][0] == 1): p.setFloatArray('HAIR_ROOT_UVS', list(data['HAIR_ROOT_UVS']), c) p.setUInt('Display Percent', display_percent) if(extension == 'MaxwellHair'): p.setUInt('Display Max. 
Hairs', display_max) p.setDouble('Root Radius', root_radius) p.setDouble('Tip Radius', tip_radius) if(extension == 'MGrassP'): p.setUInt('Display Max. Hairs', display_max) p.setDouble('Root Radius', root_radius) p.setDouble('Tip Radius', tip_radius) o = s.createGeometryProceduralObject(name, p) if(extension == 'MaxwellHair'): a, _ = o.addChannelUVW() o.generateCustomUVW(0, a) b, _ = o.addChannelUVW() o.generateCustomUVW(1, b) c, _ = o.addChannelUVW() o.generateCustomUVW(2, c) if(extension == 'MGrassP'): a, _ = o.addChannelUVW() o.generateCustomUVW(0, a) b, _ = o.addChannelUVW() o.generateCustomUVW(1, b) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) if(material is not None): if(material != ''): mat = self.get_material(material) o.setMaterial(mat) if(backface_material is not None): if(backface_material != ''): mat = self.get_material(backface_material) o.setBackfaceMaterial(mat) return o def ext_sea(self, name, matrix, motion=None, object_props=None, geometry=None, wind=None, material=None, backface_material=None, ): """Create sea extension object. name string base ((3 float), (3 float), (3 float), (3 float)) pivot ((3 float), (3 float), (3 float), (3 float)) object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None geometry (float reference_time, int resolution, float ocean_depth, float vertical_scale, float ocean_dim, int ocean_seed, bool enable_choppyness, float choppy_factor, ) wind (float ocean_wind_mod, float ocean_wind_dir, float ocean_wind_alignment, float ocean_min_wave_length, float damp_factor_against_wind, ) material (string path, bool embed) or None backface_material (string path, bool embed) or None """ s = self.mxs e = self.mgr.createDefaultGeometryLoaderExtension('MaxwellSea') p = e.getExtensionData() p.setFloat('Reference Time', geometry[0]) p.setUInt('Resolution', geometry[1]) p.setFloat('Ocean Depth', geometry[2]) p.setFloat('Vertical Scale', geometry[3]) p.setFloat('Ocean Dim', geometry[4]) p.setUInt('Ocean Seed', geometry[5]) p.setByte('Enable Choppyness', geometry[6]) p.setFloat('Choppy factor', geometry[7]) p.setByte('Enable White Caps', geometry[8]) p.setFloat('Ocean Wind Mod.', wind[0]) p.setFloat('Ocean Wind Dir.', wind[1]) p.setFloat('Ocean Wind Alignment', wind[2]) p.setFloat('Ocean Min. Wave Length', wind[3]) p.setFloat('Damp Factor Against Wind', wind[4]) o = s.createGeometryLoaderObject(name, p) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) if(material is not None): if(material != ''): mat = self.get_material(material) o.setMaterial(mat) if(backface_material is not None): if(backface_material != ''): mat = self.get_material(backface_material) o.setBackfaceMaterial(mat) def ext_volumetrics(self, name, properties, matrix, motion=None, object_props=None, material=None, backface_material=None, ): """Create Volumetrics Extension Object. 
name string properties (int type 1, float density) or (int type 2, float density, int seed, float low, float high, float detail, int octaves, float persistence) base ((3 float), (3 float), (3 float), (3 float)) pivot ((3 float), (3 float), (3 float), (3 float)) object_props (bool hide, float opacity, tuple cid=(int, int, int), bool hcam, bool hcamsc, bool hgi, bool hrr, bool hzcp, ) or None material (string path, bool embed) or None backface_material (string path, bool embed) or None """ s = self.mxs e = self.mgr.createDefaultGeometryProceduralExtension('MaxwellVolumetric') p = e.getExtensionData() d = properties p.setByte('Create Constant Density', d[0]) p.setFloat('ConstantDensity', d[1]) if(d[0] == 2): p.setUInt('Seed', d[2]) p.setFloat('Low value', d[3]) p.setFloat('High value', d[4]) p.setFloat('Detail', d[5]) p.setInt('Octaves', d[6]) p.setFloat('Persistance', d[7]) # 'Persistance' (sic) is the spelling this parameter uses throughout the API (see also the Noise texture) o = s.createGeometryProceduralObject(name, p) self.set_base_and_pivot(o, matrix, motion, ) if(object_props is not None): self.set_object_props(o, *object_props) if(material is not None): if(material != ''): mat = self.get_material(material) o.setMaterial(mat) if(backface_material is not None): if(backface_material != ''): mat = self.get_material(backface_material) o.setBackfaceMaterial(mat) return o def mod_grass(self, object_name, properties, material=None, backface_material=None, ): """Create grass object modifier extension. object_name string properties dict of many, many properties, see code.. material (string path, bool embed) or None backface_material (string path, bool embed) or None """ s = self.mxs e = self.mgr.createDefaultGeometryModifierExtension('MaxwellGrass') p = e.getExtensionData() if(material is not None): mat = self.get_material(material) if(mat is not None): p.setString('Material', mat.getName()) if(backface_material is not None): mat = self.get_material(backface_material) if(mat is not None): p.setString('Double Sided Material', mat.getName()) p.setUInt('Density', properties['density']) self.texture_data_to_mxparams('Density Map', properties['density_map'], p, ) p.setFloat('Length', properties['length']) self.texture_data_to_mxparams('Length Map', properties['length_map'], p, ) p.setFloat('Length Variation', properties['length_variation']) p.setFloat('Root Width', properties['root_width']) p.setFloat('Tip Width', properties['tip_width']) p.setFloat('Direction Type', properties['direction_type']) p.setFloat('Initial Angle', properties['initial_angle']) p.setFloat('Initial Angle Variation', properties['initial_angle_variation']) self.texture_data_to_mxparams('Initial Angle Map', properties['initial_angle_map'], p, ) p.setFloat('Start Bend', properties['start_bend']) p.setFloat('Start Bend Variation', properties['start_bend_variation']) self.texture_data_to_mxparams('Start Bend Map', properties['start_bend_map'], p, ) p.setFloat('Bend Radius', properties['bend_radius']) p.setFloat('Bend Radius Variation', properties['bend_radius_variation']) self.texture_data_to_mxparams('Bend Radius Map', properties['bend_radius_map'], p, ) p.setFloat('Bend Angle', properties['bend_angle']) p.setFloat('Bend Angle Variation', properties['bend_angle_variation']) self.texture_data_to_mxparams('Bend Angle Map', properties['bend_angle_map'], p, ) p.setFloat('Cut Off', properties['cut_off']) p.setFloat('Cut Off Variation', properties['cut_off_variation']) self.texture_data_to_mxparams('Cut Off Map', properties['cut_off_map'], p, ) p.setUInt('Points per Blade', properties['points_per_blade']) p.setUInt('Primitive Type', 
properties['primitive_type']) p.setUInt('Seed', properties['seed']) p.setByte('Enable LOD', properties['lod']) p.setFloat('LOD Min Distance', properties['lod_min_distance']) p.setFloat('LOD Max Distance', properties['lod_max_distance']) p.setFloat('LOD Max Distance Density', properties['lod_max_distance_density']) p.setUInt('Display Percent', properties['display_percent']) p.setUInt('Display Max. Blades', properties['display_max_blades']) o = s.getObject(object_name) o.applyGeometryModifierExtension(p) return o def mod_subdivision(self, object_name, level=2, scheme=0, interpolation=2, crease=0.0, smooth_angle=90.0, quads=None, ): """Create subdivision object modifier extension. object_name string level int scheme int (0, "Catmull-Clark"), (1, "Loop") interpolation int (0, "None"), (1, "Edges"), (2, "Edges And Corners"), (3, "Sharp") crease float smooth float quads [[int, int], ...] or None """ s = self.mxs e = self.mgr.createDefaultGeometryModifierExtension('SubdivisionModifier') p = e.getExtensionData() p.setUInt('Subdivision Level', level) p.setUInt('Subdivision Scheme', scheme) p.setUInt('Interpolation', interpolation) p.setFloat('Crease', crease) p.setFloat('Smooth Angle', smooth_angle) o = s.getObject(object_name) if(scheme == 0 and quads is not None): for t, q in quads: o.setTriangleQuadBuddy(t, q) o.applyGeometryModifierExtension(p) return o def mod_scatter(self, object_name, scatter_object, inherit_objectid=False, remove_overlapped=False, density=None, seed=0, scale=None, rotation=None, lod=None, angle=None, display_percent=10, display_max=1000, ): """Create scatter object modifier extension. object_name string scatter_object string inherit_objectid bool density (float, density_map or None) or None seed int scale ((float, float, float), scale_map or None, scale_variation (float, float, float)) or None rotation ((float, float, float), rotation_map or None, rotation_variation (float, float, float), rotation_direction int (0, "Polygon Normal"), (1, "World Z")) or None lod (bool, lod_min_distance float, lod_max_distance float, lod_max_distance_density float) or None display_percent int display_max int """ s = self.mxs e = self.mgr.createDefaultGeometryModifierExtension('MaxwellScatter') p = e.getExtensionData() p.setString('Object', scatter_object) p.setByte('Inherit ObjectID', inherit_objectid) if(density is not None): p.setFloat('Density', density[0]) self.texture_data_to_mxparams('Density Map', density[1], p, ) p.setUInt('Seed', seed) p.setByte('Remove Overlapped', remove_overlapped) if(scale is not None): p.setFloat('Scale X', scale[0]) p.setFloat('Scale Y', scale[1]) p.setFloat('Scale Z', scale[2]) self.texture_data_to_mxparams('Scale Map', scale[3], p, ) p.setFloat('Scale X Variation', scale[4]) p.setFloat('Scale Y Variation', scale[5]) p.setFloat('Scale Z Variation', scale[6]) p.setByte('Uniform Scale', scale[7]) if(rotation is not None): p.setFloat('Rotation X', rotation[0]) p.setFloat('Rotation Y', rotation[1]) p.setFloat('Rotation Z', rotation[2]) self.texture_data_to_mxparams('Rotation Map', rotation[3], p, ) p.setFloat('Rotation X Variation', rotation[4]) p.setFloat('Rotation Y Variation', rotation[5]) p.setFloat('Rotation Z Variation', rotation[6]) p.setUInt('Direction Type', rotation[7]) if(lod is not None): p.setByte('Enable LOD', lod[0]) p.setFloat('LOD Min Distance', lod[1]) p.setFloat('LOD Max Distance', lod[2]) p.setFloat('LOD Max Distance Density', lod[3]) if(angle is not None): p.setFloat('Direction Type', angle[0]) p.setFloat('Initial Angle', angle[1]) 
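# the 'angle' block reuses the direction/initial-angle parameter names of the MaxwellGrass modifier above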
p.setFloat('Initial Angle Variation', angle[2]) self.texture_data_to_mxparams('Initial Angle Map', angle[3], p, ) p.setUInt('Display Percent', display_percent) p.setUInt('Display Max. Blades', display_max) o = s.getObject(object_name) o.applyGeometryModifierExtension(p) return o def mod_cloner(self, object_name, cloned_object, render_emitter, pdata, radius=1.0, mb_factor=1.0, load_percent=100.0, start_offset=0, ex_npp=0, ex_p_dispersion=0.0, ex_p_deformation=0.0, align_to_velocity=False, scale_with_radius=False, inherit_obj_id=False, frame=1, fps=24.0, display_percent=10, display_max=1000, ): """Create cloner object modifier extension. object_name string cloned_object string render_emitter bool pdata string or dict radius float mb_factor float load_percent float start_offset int ex_npp int ex_p_dispersion float ex_p_deformation float align_to_velocity bool scale_with_radius bool inherit_obj_id bool frame int fps float display_percent int display_max int """ s = self.mxs e = self.mgr.createDefaultGeometryModifierExtension('MaxwellCloner') p = e.getExtensionData() if(type(pdata) is dict): c = Cbase() c.origin = Cvector(0.0, 0.0, 0.0) c.xAxis = Cvector(1.0, 0.0, 0.0) c.yAxis = Cvector(0.0, 1.0, 0.0) c.zAxis = Cvector(0.0, 0.0, 1.0) p.setFloatArray('PARTICLE_POSITIONS', list(pdata['PARTICLE_POSITIONS']), c) p.setFloatArray('PARTICLE_SPEEDS', list(pdata['PARTICLE_SPEEDS']), c) p.setFloatArray('PARTICLE_RADII', list(pdata['PARTICLE_RADII']), c) p.setIntArray('PARTICLE_IDS', list(pdata['PARTICLE_IDS'])) else: p.setString('FileName', pdata) p.setFloat('Radius Factor', radius) p.setFloat('MB Factor', mb_factor) p.setFloat('Load particles %', load_percent) p.setUInt('Start offset', start_offset) p.setUInt('Create N particles per particle', ex_npp) p.setFloat('Extra particles dispersion', ex_p_dispersion) p.setFloat('Extra particles deformation', ex_p_deformation) p.setByte('Use velocity', align_to_velocity) p.setByte('Scale with particle radius', scale_with_radius) p.setByte('Inherit ObjectID', inherit_obj_id) p.setInt('Frame#', frame) p.setFloat('fps', fps) p.setUInt('Display Percent', display_percent) p.setUInt('Display Max. Particles', display_max) if(not render_emitter): o = s.getObject(object_name) o.setHide(True) o = s.getObject(cloned_object) o.applyGeometryModifierExtension(p) return o def wireframe_override_object_materials(self, clay_mat_name, wire_base_name, ): s = self.mxs it = CmaxwellObjectIterator() o = it.first(s) l = [] while not o.isNull(): name, _ = o.getName() l.append(name) o = it.next() mat = self.get_material(clay_mat_name) # the iterator yields names only, so look each object up by name before overriding its material for n in l: o = s.getObject(n) # do not set material to wire base if(n != wire_base_name): if(o.isInstance()[0] == 1): instanced = o.getInstanced() # do not set material to wire base instances if(instanced.getName()[0] != wire_base_name): o.setMaterial(mat) else: o.setMaterial(mat) def wireframe_zero_scale_base(self, wire_base_name): s = self.mxs o = s.getObject(wire_base_name) z = (0.0, 0.0, 0.0) b = Cbase() b.origin = Cvector(*z) b.xAxis = Cvector(*z) b.yAxis = Cvector(*z) b.zAxis = Cvector(*z) p = Cbase() p.origin = Cvector(*z) p.xAxis = Cvector(1.0, 0.0, 0.0) p.yAxis = Cvector(0.0, 1.0, 0.0) p.zAxis = Cvector(0.0, 0.0, 1.0) o.setBaseAndPivot(b, p) o.setScale(Cvector(0, 0, 0)) class MXMWriter(): def __init__(self, path, data, ): """Create Extension MXM.
path string (path) data dict """ if(__name__ != "__main__"): if(platform.system() == 'Darwin'): raise ImportError("No pymaxwell for Mac OS X..") log(self.__class__.__name__, 1, LogStyles.MESSAGE, prefix="* ", ) self.path = path self.mxs = Cmaxwell(mwcallback) self.mgr = CextensionManager.instance() self.mgr.loadAllExtensions() mat = self.material(data) if(mat is not None): log("writing to: {}".format(self.path), 2, prefix="* ", ) mat.write(path) log("done.", 2, prefix="* ", ) else: raise RuntimeError("Something unexpected happened..") def texture_data_to_mxparams(self, name, data, mxparams, ): """Create CtextureMap, fill with parameters and put into mxparams. name string data dict {'type': string, 'path': string, 'channel': int, 'use_global_map': bool, 'tile_method_type': [bool, bool], 'tile_method_units': int, 'repeat': [float, float], 'mirror': [bool, bool], 'offset': [float, float], 'rotation': float, 'invert': bool, 'alpha_only': bool, 'interpolation': bool, 'brightness': float, 'contrast': float, 'saturation': float, 'hue': float, 'rgb_clamp': [float, float], } mxparams mxparams """ d = data if(d is None): return t = CtextureMap() t.setPath(d['path']) v = Cvector2D() v.assign(*d['repeat']) t.scale = v v = Cvector2D() v.assign(*d['offset']) t.offset = v t.rotation = d['rotation'] t.uvwChannelID = d['channel'] t.uIsTiled = d['tile_method_type'][0] t.vIsTiled = d['tile_method_type'][1] t.uIsMirrored = d['mirror'][0] t.vIsMirrored = d['mirror'][1] t.invert = d['invert'] # t.doGammaCorrection = 0 t.useAbsoluteUnits = d['tile_method_units'] t.normalMappingFlipRed = d['normal_mapping_flip_red'] t.normalMappingFlipGreen = d['normal_mapping_flip_green'] t.normalMappingFullRangeBlue = d['normal_mapping_full_range_blue'] t.useAlpha = d['alpha_only'] t.typeInterpolation = d['interpolation'] t.saturation = d['saturation'] / 100 t.contrast = d['contrast'] / 100 t.brightness = d['brightness'] / 100 t.hue = d['hue'] / 180 t.clampMin = d['rgb_clamp'][0] / 255 t.clampMax = d['rgb_clamp'][1] / 255 t.useGlobalMap = d['use_global_map'] # t.cosA = 1.000000 # t.sinA = 0.000000 ok = mxparams.setTextureMap(name, t) return mxparams def texture(self, d, ): """Create CtextureMap from parameters d dict """ s = self.mxs t = CtextureMap() t.setPath(d['path']) t.uvwChannelID = d['channel'] t.brightness = d['brightness'] / 100 t.contrast = d['contrast'] / 100 t.saturation = d['saturation'] / 100 t.hue = d['hue'] / 180 t.useGlobalMap = d['use_global_map'] t.useAbsoluteUnits = d['tile_method_units'] t.uIsTiled = d['tile_method_type'][0] t.vIsTiled = d['tile_method_type'][1] t.uIsMirrored = d['mirror'][0] t.vIsMirrored = d['mirror'][1] vec = Cvector2D() vec.assign(d['offset'][0], d['offset'][1]) t.offset = vec t.rotation = d['rotation'] t.invert = d['invert'] t.useAlpha = d['alpha_only'] if(d['interpolation']): t.typeInterpolation = 1 else: t.typeInterpolation = 0 t.clampMin = d['rgb_clamp'][0] / 255 t.clampMax = d['rgb_clamp'][1] / 255 vec = Cvector2D() vec.assign(d['repeat'][0], d['repeat'][1]) t.scale = vec t.normalMappingFlipRed = d['normal_mapping_flip_red'] t.normalMappingFlipGreen = d['normal_mapping_flip_green'] t.normalMappingFullRangeBlue = d['normal_mapping_full_range_blue'] for i, pt in enumerate(d['procedural']): if(pt['use'] == 'BRICK'): e = self.mgr.createDefaultTextureExtension('Brick') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setFloat('Brick width', pt['brick_brick_width']) p.setFloat('Brick height', pt['brick_brick_height']) p.setInt('Brick offset', 
pt['brick_brick_offset']) p.setInt('Random offset', pt['brick_random_offset']) p.setByte('Double brick', pt['brick_double_brick']) p.setFloat('Small brick width', pt['brick_small_brick_width']) p.setByte('Round corners', pt['brick_round_corners']) p.setFloat('Boundary sharpness U', pt['brick_boundary_sharpness_u']) p.setFloat('Boundary sharpness V', pt['brick_boundary_sharpness_v']) p.setInt('Boundary noise detail', pt['brick_boundary_noise_detail']) p.setFloat('Boundary noise region U', pt['brick_boundary_noise_region_u']) p.setFloat('Boundary noise region V', pt['brick_boundary_noise_region_v']) p.setUInt('Seed', pt['brick_seed']) p.setByte('Random rotation', pt['brick_random_rotation']) p.setInt('Color variation', pt['brick_color_variation']) c = Crgb() c.assign(*pt['brick_brick_color_0']) p.setRgb('Brick color 0', c) self.texture_data_to_mxparams('Brick texture 0', pt['brick_brick_texture_0'], p, ) p.setInt('Sampling factor 0', pt['brick_sampling_factor_0']) p.setInt('Weight 0', pt['brick_weight_0']) c = Crgb() c.assign(*pt['brick_brick_color_1']) p.setRgb('Brick color 1', c) self.texture_data_to_mxparams('Brick texture 1', pt['brick_brick_texture_1'], p, ) p.setInt('Sampling factor 1', pt['brick_sampling_factor_1']) p.setInt('Weight 1', pt['brick_weight_1']) c = Crgb() c.assign(*pt['brick_brick_color_2']) p.setRgb('Brick color 2', c) self.texture_data_to_mxparams('Brick texture 2', pt['brick_brick_texture_2'], p, ) p.setInt('Sampling factor 2', pt['brick_sampling_factor_2']) p.setInt('Weight 2', pt['brick_weight_2']) p.setFloat('Mortar thickness', pt['brick_mortar_thickness']) c = Crgb() c.assign(*pt['brick_mortar_color']) p.setRgb('Mortar color', c) self.texture_data_to_mxparams('Mortar texture', pt['brick_mortar_texture'], p, ) t.addProceduralTexture(p) elif(pt['use'] == 'CHECKER'): e = self.mgr.createDefaultTextureExtension('Checker') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) c = Crgb() c.assign(*pt['checker_color_0']) p.setRgb('Color0', c) c = Crgb() c.assign(*pt['checker_color_1']) p.setRgb('Color1', c) p.setUInt('Number of elements U', pt['checker_number_of_elements_u']) p.setUInt('Number of elements V', pt['checker_number_of_elements_v']) p.setFloat('Transition sharpness', pt['checker_transition_sharpness']) p.setUInt('Fall-off', pt['checker_falloff']) t.addProceduralTexture(p) elif(pt['use'] == 'CIRCLE'): e = self.mgr.createDefaultTextureExtension('Circle') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) c = Crgb() c.assign(*pt['circle_background_color']) p.setRgb('Background color', c) c = Crgb() c.assign(*pt['circle_circle_color']) p.setRgb('Circle color', c) p.setFloat('RadiusU', pt['circle_radius_u']) p.setFloat('RadiusV', pt['circle_radius_v']) p.setFloat('Transition factor', pt['circle_transition_factor']) p.setUInt('Fall-off', pt['circle_falloff']) t.addProceduralTexture(p) elif(pt['use'] == 'GRADIENT3'): e = self.mgr.createDefaultTextureExtension('Gradient3') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setByte('Gradient U', pt['gradient3_gradient_u']) c = Crgb() c.assign(*pt['gradient3_color0_u']) p.setRgb('Color0 U', c) c = Crgb() c.assign(*pt['gradient3_color1_u']) p.setRgb('Color1 U', c) c = Crgb() c.assign(*pt['gradient3_color2_u']) p.setRgb('Color2 U', c) p.setUInt('Gradient type U', pt['gradient3_gradient_type_u']) p.setFloat('Color1 U position', pt['gradient3_color1_u_position']) p.setByte('Gradient V', pt['gradient3_gradient_v']) c = Crgb() 
c.assign(*pt['gradient3_color0_v']) p.setRgb('Color0 V', c) c = Crgb() c.assign(*pt['gradient3_color1_v']) p.setRgb('Color1 V', c) c = Crgb() c.assign(*pt['gradient3_color2_v']) p.setRgb('Color2 V', c) p.setUInt('Gradient type V', pt['gradient3_gradient_type_v']) p.setFloat('Color1 V position', pt['gradient3_color1_v_position']) t.addProceduralTexture(p) elif(pt['use'] == 'GRADIENT'): e = self.mgr.createDefaultTextureExtension('Gradient') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setByte('Gradient U', pt['gradient_gradient_u']) c = Crgb() c.assign(*pt['gradient_color0_u']) p.setRgb('Color0 U', c) c = Crgb() c.assign(*pt['gradient_color1_u']) p.setRgb('Color1 U', c) p.setUInt('Gradient type U', pt['gradient_gradient_type_u']) p.setFloat('Transition factor U', pt['gradient_transition_factor_u']) p.setByte('Gradient V', pt['gradient_gradient_v']) c = Crgb() c.assign(*pt['gradient_color0_v']) p.setRgb('Color0 V', c) c = Crgb() c.assign(*pt['gradient_color1_v']) p.setRgb('Color1 V', c) p.setUInt('Gradient type V', pt['gradient_gradient_type_v']) p.setFloat('Transition factor V', pt['gradient_transition_factor_v']) t.addProceduralTexture(p) elif(pt['use'] == 'GRID'): e = self.mgr.createDefaultTextureExtension('Grid') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) c = Crgb() c.assign(*pt['grid_boundary_color']) p.setRgb('Boundary color', c) c = Crgb() c.assign(*pt['grid_cell_color']) p.setRgb('Cell color', c) p.setFloat('Cell width', pt['grid_cell_width']) p.setFloat('Cell height', pt['grid_cell_height']) if(pt['grid_horizontal_lines']): p.setFloat('Boundary thickness U', pt['grid_boundary_thickness_u']) else: p.setFloat('Boundary thickness U', 0.0) if(pt['grid_vertical_lines']): p.setFloat('Boundary thickness V', pt['grid_boundary_thickness_v']) else: p.setFloat('Boundary thickness V', 0.0) p.setFloat('Transition sharpness', pt['grid_transition_sharpness']) p.setUInt('Fall-off', pt['grid_falloff']) t.addProceduralTexture(p) elif(pt['use'] == 'MARBLE'): e = self.mgr.createDefaultTextureExtension('Marble') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setUInt('Coordinates type', pt['marble_coordinates_type']) c = Crgb() c.assign(*pt['marble_color0']) p.setRgb('Color0', c) c = Crgb() c.assign(*pt['marble_color1']) p.setRgb('Color1', c) c = Crgb() c.assign(*pt['marble_color2']) p.setRgb('Color2', c) p.setFloat('Frequency', pt['marble_frequency']) p.setFloat('Detail', pt['marble_detail']) p.setInt('Octaves', pt['marble_octaves']) p.setUInt('Seed', pt['marble_seed']) t.addProceduralTexture(p) elif(pt['use'] == 'NOISE'): e = self.mgr.createDefaultTextureExtension('Noise') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setUInt('Coordinates type', pt['noise_coordinates_type']) c = Crgb() c.assign(*pt['noise_noise_color']) p.setRgb('Noise color', c) c = Crgb() c.assign(*pt['noise_background_color']) p.setRgb('Background color', c) p.setFloat('Detail', pt['noise_detail']) p.setFloat('Persistance', pt['noise_persistance']) p.setInt('Octaves', pt['noise_octaves']) p.setFloat('Low value', pt['noise_low_value']) p.setFloat('High value', pt['noise_high_value']) p.setUInt('Seed', pt['noise_seed']) t.addProceduralTexture(p) elif(pt['use'] == 'VORONOI'): e = self.mgr.createDefaultTextureExtension('Voronoi') p = e.getExtensionData() p.setFloat('Blend procedural', pt['blending_factor']) p.setUInt('Coordinates type', pt['voronoi_coordinates_type']) c = Crgb() 
c.assign(*pt['voronoi_color0']) p.setRgb('Color0', c) c = Crgb() c.assign(*pt['voronoi_color1']) p.setRgb('Color1', c) p.setInt('Detail', pt['voronoi_detail']) p.setUInt('Distance', pt['voronoi_distance']) p.setUInt('Combination', pt['voronoi_combination']) p.setFloat('Low value', pt['voronoi_low_value']) p.setFloat('High value', pt['voronoi_high_value']) p.setUInt('Seed', pt['voronoi_seed']) t.addProceduralTexture(p) elif(pt['use'] == 'TILED'): e = self.mgr.createDefaultTextureExtension('TiledTexture') p = e.getExtensionData() p.setFloat('Blend factor', pt['blending_factor']) c = Crgb() c.assign(*pt['tiled_base_color']) p.setRgb('Base Color', c) p.setByte('Use base color', pt['tiled_use_base_color']) p.setString('Filename_mask', pt['tiled_token_mask']) p.setString('Filename', pt['tiled_filename']) # 'Map U tile range' UCHAR # 'Map V tile range' UCHAR t.addProceduralTexture(p) elif(pt['use'] == 'WIREFRAME'): e = self.mgr.createDefaultTextureExtension('WireframeTexture') p = e.getExtensionData() c = Crgb() c.assign(*pt['wireframe_fill_color']) p.setRgb('Fill Color', c) c = Crgb() c.assign(*pt['wireframe_edge_color']) p.setRgb('Edge Color', c) c = Crgb() c.assign(*pt['wireframe_coplanar_edge_color']) p.setRgb('Coplanar Edge Color', c) p.setFloat('Edge Width', pt['wireframe_edge_width']) p.setFloat('Coplanar Edge Width', pt['wireframe_coplanar_edge_width']) p.setFloat('Coplanar Threshold', pt['wireframe_coplanar_threshold']) t.addProceduralTexture(p) else: raise TypeError("{0} is unknown procedural texture type".format(pt['use'])) return t def material_placeholder(self, n=None, ): if(n is not None): pass else: n = 'MATERIAL_PLACEHOLDER' s = self.mxs m = s.createMaterial(n) l = m.addLayer() b = l.addBSDF() r = b.getReflectance() a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = CtextureMap() mgr = CextensionManager.instance() mgr.loadAllExtensions() e = mgr.createDefaultTextureExtension('Checker') ch = e.getExtensionData() ch.setUInt('Number of elements U', 32) ch.setUInt('Number of elements V', 32) t.addProceduralTexture(ch) a.textureMap = t r.setAttribute('color', a) return m def material_default(self, n, ): s = self.mxs m = s.createMaterial(n) l = m.addLayer() b = l.addBSDF() return m def material_external(self, d, ): s = self.mxs p = d['path'] t = s.readMaterial(p) t.setName(d['name']) m = s.addMaterial(t) if(not d['embed']): m.setReference(1, p) return m def material_custom(self, d, ): s = self.mxs m = s.createMaterial(d['name']) d = d['data'] def global_props(d, m): # global properties if(d['override_map']): t = self.texture(d['override_map']) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['bump_map']) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) if(d['active_display_map']): t = self.texture(d['active_display_map']) m.setActiveDisplayMap(t) def displacement(d, m): if(not d['enabled']): return m.enableDisplacement(True) if(d['map'] is not None): t = self.texture(d['map']) m.setDisplacementMap(t) m.setDisplacementCommonParameters(d['type'], d['subdivision'], int(d['smoothing']), d['offset'], d['subdivision_method'], d['uv_interpolation'], ) m.setHeightMapDisplacementParameters(d['height'], 
d['height_units'], d['adaptive'], ) v = Cvector(*d['v3d_scale']) m.setVectorDisplacementParameters(v, d['v3d_transform'], d['v3d_rgb_mapping'], d['v3d_preset'], ) def add_bsdf(d, l): b = l.addBSDF() b.setName(d['name']) bp = d['bsdf_props'] # weight if(bp['weight_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['weight_map']) if(t is not None): a.textureMap = t a.value = bp['weight'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['weight'] b.setWeight(a) # enabled if(not bp['visible']): b.setState(False) # ior r = b.getReflectance() if(bp['ior'] == 1): # measured data r.setActiveIorMode(1) r.setComplexIor(bp['complex_ior']) else: if(bp['reflectance_0_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['reflectance_0_map']) if(t is not None): a.textureMap = t a.rgb.assign(*bp['reflectance_0']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['reflectance_0']) r.setAttribute('color', a) if(bp['reflectance_90_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['reflectance_90_map']) if(t is not None): a.textureMap = t a.rgb.assign(*bp['reflectance_90']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['reflectance_90']) r.setAttribute('color.tangential', a) if(bp['transmittance_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['transmittance_map']) if(t is not None): a.textureMap = t a.rgb.assign(*bp['transmittance']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['transmittance']) r.setAttribute('transmittance.color', a) r.setAbsorptionDistance(bp['attenuation_units'], bp['attenuation']) r.setIOR(bp['nd'], bp['abbe']) if(bp['force_fresnel']): r.enableForceFresnel(True) r.setConductor(bp['k']) if(bp['r2_enabled']): r.setFresnelCustom(bp['r2_falloff_angle'], bp['r2_influence'], True, ) # surface if(bp['roughness_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['roughness_map']) if(t is not None): a.textureMap = t a.value = bp['roughness'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['roughness'] b.setAttribute('roughness', a) if(bp['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['bump_map']) if(t is not None): a.textureMap = t if(bp['bump_map_use_normal']): a.value = bp['bump_normal'] else: a.value = bp['bump'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE if(bp['bump_map_use_normal']): a.value = bp['bump_normal'] else: a.value = bp['bump'] b.setAttribute('bump', a) b.setNormalMapState(bp['bump_map_use_normal']) if(bp['anisotropy_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['anisotropy_map']) if(t is not None): a.textureMap = t a.value = bp['anisotropy'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['anisotropy'] b.setAttribute('anisotropy', a) if(bp['anisotropy_angle_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['anisotropy_angle_map']) if(t is not None): a.textureMap = t a.value = bp['anisotropy_angle'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['anisotropy_angle'] b.setAttribute('angle', a) # subsurface a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*bp['scattering']) r.setAttribute('scattering', a) r.setScatteringParameters(bp['coef'], bp['asymmetry'], bp['single_sided']) if(bp['single_sided']): if(bp['single_sided_map_enabled']): a = Cattribute() 
a.activeType = MAP_TYPE_BITMAP t = self.texture(bp['single_sided_map']) if(t is not None): a.textureMap = t a.value = bp['single_sided_value'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = bp['single_sided_value'] r.setScatteringThickness(a) r.setScatteringThicknessRange(bp['single_sided_min'], bp['single_sided_max']) # coating cp = d['coating'] if(cp['enabled']): c = b.addCoating() if(cp['thickness_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(cp['thickness_map']) if(t is not None): a.textureMap = t a.value = cp['thickness'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = cp['thickness'] c.setThickness(a) c.setThicknessRange(cp['thickness_map_min'], cp['thickness_map_max']) r = c.getReflectance() if(cp['ior'] == 1): # measured data r.setActiveIorMode(1) r.setComplexIor(cp['complex_ior']) else: if(cp['reflectance_0_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(cp['reflectance_0_map']) if(t is not None): a.textureMap = t a.rgb.assign(*cp['reflectance_0']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*cp['reflectance_0']) r.setAttribute('color', a) if(cp['reflectance_90_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(cp['reflectance_90_map']) if(t is not None): a.textureMap = t a.rgb.assign(*cp['reflectance_90']) else: a = Cattribute() a.activeType = MAP_TYPE_RGB a.rgb.assign(*cp['reflectance_90']) r.setAttribute('color.tangential', a) r.setIOR(cp['nd'], 1.0, ) if(cp['force_fresnel']): r.enableForceFresnel(True) r.setConductor(cp['k']) if(cp['r2_enabled']): r.setFresnelCustom(cp['r2_falloff_angle'], 0.0, True, ) def add_emitter(d, l): e = l.createEmitter() if(d['type'] == 0): e.setLobeType(EMISSION_LOBE_DEFAULT) elif(d['type'] == 1): e.setLobeType(EMISSION_LOBE_IES) e.setLobeIES(d['ies_data']) e.setIESLobeIntensity(d['ies_intensity']) elif(d['type'] == 2): e.setLobeType(EMISSION_LOBE_SPOTLIGHT) if(d['spot_map'] is not None): t = self.texture(d['spot_map']) if(t is not None): e.setLobeImageProjectedMap(d['spot_map_enabled'], t) e.setSpotConeAngle(d['spot_cone_angle']) e.setSpotFallOffAngle(d['spot_falloff_angle']) e.setSpotFallOffType(d['spot_falloff_type']) e.setSpotBlur(d['spot_blur']) if(d['emission'] == 0): e.setActiveEmissionType(EMISSION_TYPE_PAIR) ep = CemitterPair() c = Crgb() c.assign(*d['color']) ep.rgb.assign(c) ep.temperature = d['color_black_body'] ep.watts = d['luminance_power'] ep.luminousEfficacy = d['luminance_efficacy'] ep.luminousPower = d['luminance_output'] ep.illuminance = d['luminance_output'] ep.luminousIntensity = d['luminance_output'] ep.luminance = d['luminance_output'] e.setPair(ep) if(d['luminance'] == 0): u = EMISSION_UNITS_WATTS_AND_LUMINOUS_EFFICACY elif(d['luminance'] == 1): u = EMISSION_UNITS_LUMINOUS_POWER elif(d['luminance'] == 2): u = EMISSION_UNITS_ILLUMINANCE elif(d['luminance'] == 3): u = EMISSION_UNITS_LUMINOUS_INTENSITY elif(d['luminance'] == 4): u = EMISSION_UNITS_LUMINANCE if(d['color_black_body_enabled']): e.setActivePair(EMISSION_COLOR_TEMPERATURE, u) else: e.setActivePair(EMISSION_RGB, u) elif(d['emission'] == 1): e.setActiveEmissionType(EMISSION_TYPE_TEMPERATURE) e.setTemperature(d['temperature_value']) elif(d['emission'] == 2): e.setActiveEmissionType(EMISSION_TYPE_MXI) a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['hdr_map']) if(t is not None): a.textureMap = t a.value = d['hdr_intensity'] e.setMXI(a) e.setState(True) def add_layer(d, m): l = m.addLayer() 
l.setName(d['name']) lpd = d['layer_props'] if(not lpd['visible']): l.setEnabled(False) if(lpd['blending'] == 1): l.setStackedBlendingMode(1) if(lpd['opacity_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(lpd['opacity_map']) if(t is not None): a.textureMap = t a.value = lpd['opacity'] else: a = Cattribute() a.activeType = MAP_TYPE_VALUE a.value = lpd['opacity'] l.setAttribute('weight', a) epd = d['emitter'] if(epd['enabled']): add_emitter(epd, l) for b in d['bsdfs']: add_bsdf(b, l) global_props(d['global_props'], m) displacement(d['displacement'], m) for layer in d['layers']: add_layer(layer, m) return m def material(self, d, ): s = self.mxs if(d['subtype'] == 'EXTERNAL'): if(d['path'] == ''): m = self.material_placeholder(d['name']) else: m = self.material_external(d) if(d['override']): # global properties if(d['override_map']): t = self.texture(d['override_map']) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['bump_map']) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) elif(d['subtype'] == 'EXTENSION'): if(d['use'] == 'EMITTER'): m = s.createMaterial(d['name']) l = m.addLayer() e = l.createEmitter() if(d['emitter_type'] == 0): e.setLobeType(EMISSION_LOBE_DEFAULT) elif(d['emitter_type'] == 1): e.setLobeType(EMISSION_LOBE_IES) e.setLobeIES(d['emitter_ies_data']) e.setIESLobeIntensity(d['emitter_ies_intensity']) elif(d['emitter_type'] == 2): e.setLobeType(EMISSION_LOBE_SPOTLIGHT) if(d['emitter_spot_map'] is not None): t = self.texture(d['emitter_spot_map']) if(t is not None): e.setLobeImageProjectedMap(d['emitter_spot_map_enabled'], t) e.setSpotConeAngle(d['emitter_spot_cone_angle']) e.setSpotFallOffAngle(d['emitter_spot_falloff_angle']) e.setSpotFallOffType(d['emitter_spot_falloff_type']) e.setSpotBlur(d['emitter_spot_blur']) if(d['emitter_emission'] == 0): e.setActiveEmissionType(EMISSION_TYPE_PAIR) ep = CemitterPair() c = Crgb() c.assign(*d['emitter_color']) ep.rgb.assign(c) ep.temperature = d['emitter_color_black_body'] ep.watts = d['emitter_luminance_power'] ep.luminousEfficacy = d['emitter_luminance_efficacy'] ep.luminousPower = d['emitter_luminance_output'] ep.illuminance = d['emitter_luminance_output'] ep.luminousIntensity = d['emitter_luminance_output'] ep.luminance = d['emitter_luminance_output'] e.setPair(ep) if(d['emitter_color_black_body_enabled']): if(d['emitter_luminance'] == 0): u = EMISSION_UNITS_WATTS_AND_LUMINOUS_EFFICACY elif(d['emitter_luminance'] == 1): u = EMISSION_UNITS_LUMINOUS_POWER elif(d['emitter_luminance'] == 2): u = EMISSION_UNITS_ILLUMINANCE elif(d['emitter_luminance'] == 3): u = EMISSION_UNITS_LUMINOUS_INTENSITY elif(d['emitter_luminance'] == 4): u = EMISSION_UNITS_LUMINANCE e.setActivePair(EMISSION_COLOR_TEMPERATURE, u) else: if(d['emitter_luminance'] == 0): u = EMISSION_UNITS_WATTS_AND_LUMINOUS_EFFICACY elif(d['emitter_luminance'] == 1): u = EMISSION_UNITS_LUMINOUS_POWER elif(d['emitter_luminance'] == 2): u = EMISSION_UNITS_ILLUMINANCE elif(d['emitter_luminance'] == 3): u = EMISSION_UNITS_LUMINOUS_INTENSITY elif(d['emitter_luminance'] == 4): u = EMISSION_UNITS_LUMINANCE e.setActivePair(EMISSION_RGB, u) elif(d['emitter_emission'] == 1): 
e.setActiveEmissionType(EMISSION_TYPE_TEMPERATURE) e.setTemperature(d['emitter_temperature_value']) elif(d['emitter_emission'] == 2): e.setActiveEmissionType(EMISSION_TYPE_MXI) a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['emitter_hdr_map']) if(t is not None): a.textureMap = t a.value = d['emitter_hdr_intensity'] e.setMXI(a) e.setState(True) def global_props(d, m): # global properties if(d['override_map']): t = texture(d['override_map'], s, ) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = texture(d['bump_map'], s, ) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) if(d['active_display_map']): t = texture(d['active_display_map'], s, ) m.setActiveDisplayMap(t) global_props(d, m) else: m = CextensionManager.instance() m.loadAllExtensions() if(d['use'] == 'AGS'): e = m.createDefaultMaterialModifierExtension('AGS') p = e.getExtensionData() c = Crgb() c.assign(*d['ags_color']) p.setRgb('Color', c) p.setFloat('Reflection', d['ags_reflection']) p.setUInt('Type', d['ags_type']) elif(d['use'] == 'OPAQUE'): e = m.createDefaultMaterialModifierExtension('Opaque') p = e.getExtensionData() p.setByte('Color Type', d['opaque_color_type']) c = Crgb() c.assign(*d['opaque_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['opaque_color_map'], p, ) p.setByte('Shininess Type', d['opaque_shininess_type']) p.setFloat('Shininess', d['opaque_shininess']) self.texture_data_to_mxparams('Shininess Map', d['opaque_shininess_map'], p, ) p.setByte('Roughness Type', d['opaque_roughness_type']) p.setFloat('Roughness', d['opaque_roughness']) self.texture_data_to_mxparams('Roughness Map', d['opaque_roughness_map'], p, ) p.setByte('Clearcoat', d['opaque_clearcoat']) elif(d['use'] == 'TRANSPARENT'): e = m.createDefaultMaterialModifierExtension('Transparent') p = e.getExtensionData() p.setByte('Color Type', d['transparent_color_type']) c = Crgb() c.assign(*d['transparent_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['transparent_color_map'], p, ) p.setFloat('Ior', d['transparent_ior']) p.setFloat('Transparency', d['transparent_transparency']) p.setByte('Roughness Type', d['transparent_roughness_type']) p.setFloat('Roughness', d['transparent_roughness']) self.texture_data_to_mxparams('Roughness Map', d['transparent_roughness_map'], p, ) p.setFloat('Specular Tint', d['transparent_specular_tint']) p.setFloat('Dispersion', d['transparent_dispersion']) p.setByte('Clearcoat', d['transparent_clearcoat']) elif(d['use'] == 'METAL'): e = m.createDefaultMaterialModifierExtension('Metal') p = e.getExtensionData() p.setUInt('IOR', d['metal_ior']) p.setFloat('Tint', d['metal_tint']) p.setByte('Color Type', d['metal_color_type']) c = Crgb() c.assign(*d['metal_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['metal_color_map'], p, ) p.setByte('Roughness Type', d['metal_roughness_type']) p.setFloat('Roughness', d['metal_roughness']) self.texture_data_to_mxparams('Roughness Map', d['metal_roughness_map'], p, ) p.setByte('Anisotropy Type', d['metal_anisotropy_type']) p.setFloat('Anisotropy', d['metal_anisotropy']) self.texture_data_to_mxparams('Anisotropy Map', 
d['metal_anisotropy_map'], p, ) p.setByte('Angle Type', d['metal_angle_type']) p.setFloat('Angle', d['metal_angle']) self.texture_data_to_mxparams('Angle Map', d['metal_angle_map'], p, ) p.setByte('Dust Type', d['metal_dust_type']) p.setFloat('Dust', d['metal_dust']) self.texture_data_to_mxparams('Dust Map', d['metal_dust_map'], p, ) p.setByte('Perforation Enabled', d['metal_perforation_enabled']) self.texture_data_to_mxparams('Perforation Map', d['metal_perforation_map'], p, ) elif(d['use'] == 'TRANSLUCENT'): e = m.createDefaultMaterialModifierExtension('Translucent') p = e.getExtensionData() p.setFloat('Scale', d['translucent_scale']) p.setFloat('Ior', d['translucent_ior']) p.setByte('Color Type', d['translucent_color_type']) c = Crgb() c.assign(*d['translucent_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['translucent_color_map'], p, ) p.setFloat('Hue Shift', d['translucent_hue_shift']) p.setByte('Invert Hue', d['translucent_invert_hue']) p.setFloat('Vibrance', d['translucent_vibrance']) p.setFloat('Density', d['translucent_density']) p.setFloat('Opacity', d['translucent_opacity']) p.setByte('Roughness Type', d['translucent_roughness_type']) p.setFloat('Roughness', d['translucent_roughness']) self.texture_data_to_mxparams('Roughness Map', d['translucent_roughness_map'], p, ) p.setFloat('Specular Tint', d['translucent_specular_tint']) p.setByte('Clearcoat', d['translucent_clearcoat']) p.setFloat('Clearcoat Ior', d['translucent_clearcoat_ior']) elif(d['use'] == 'CARPAINT'): e = m.createDefaultMaterialModifierExtension('Car Paint') p = e.getExtensionData() c = Crgb() c.assign(*d['carpaint_color']) p.setRgb('Color', c) p.setFloat('Metallic', d['carpaint_metallic']) p.setFloat('Topcoat', d['carpaint_topcoat']) elif(d['use'] == 'HAIR'): e = m.createDefaultMaterialModifierExtension('Hair') p = e.getExtensionData() p.setByte('Color Type', d['hair_color_type']) c = Crgb() c.assign(*d['hair_color']) p.setRgb('Color', c) self.texture_data_to_mxparams('Color Map', d['hair_color_map'], p, ) self.texture_data_to_mxparams('Root-Tip Map', d['hair_root_tip_map'], p, ) p.setByte('Root-Tip Weight Type', d['hair_root_tip_weight_type']) p.setFloat('Root-Tip Weight', d['hair_root_tip_weight']) self.texture_data_to_mxparams('Root-Tip Weight Map', d['hair_root_tip_weight_map'], p, ) p.setFloat('Primary Highlight Strength', d['hair_primary_highlight_strength']) p.setFloat('Primary Highlight Spread', d['hair_primary_highlight_spread']) c = Crgb() c.assign(*d['hair_primary_highlight_tint']) p.setRgb('Primary Highlight Tint', c) p.setFloat('Secondary Highlight Strength', d['hair_secondary_highlight_strength']) p.setFloat('Secondary Highlight Spread', d['hair_secondary_highlight_spread']) c = Crgb() c.assign(*d['hair_secondary_highlight_tint']) p.setRgb('Secondary Highlight Tint', c) m = s.createMaterial(d['name']) m.applyMaterialModifierExtension(p) # global properties if(d['override_map']): t = self.texture(d['override_map']) if(t is not None): m.setGlobalMap(t) if(d['bump_map_enabled']): a = Cattribute() a.activeType = MAP_TYPE_BITMAP t = self.texture(d['bump_map']) if(t is not None): a.textureMap = t if(d['bump_map_use_normal']): a.value = d['bump_normal'] else: a.value = d['bump'] m.setAttribute('bump', a) m.setNormalMapState(d['bump_map_use_normal']) m.setDispersion(d['dispersion']) m.setMatteShadow(d['shadow']) m.setMatte(d['matte']) m.setNestedPriority(d['priority']) c = Crgb() c.assign(*d['id']) m.setColorID(c) if(d['active_display_map']): t = texture(d['active_display_map'], 
s, ) m.setActiveDisplayMap(t) def displacement(d, m): if(not d['enabled']): return m.enableDisplacement(True) if(d['map'] is not None): t = self.texture(d['map']) m.setDisplacementMap(t) m.setDisplacementCommonParameters(d['type'], d['subdivision'], int(d['smoothing']), d['offset'], d['subdivision_method'], d['uv_interpolation'], ) m.setHeightMapDisplacementParameters(d['height'], d['height_units'], d['adaptive'], ) v = Cvector(*d['v3d_scale']) m.setVectorDisplacementParameters(v, d['v3d_transform'], d['v3d_rgb_mapping'], d['v3d_preset'], ) try: displacement(d['displacement'], m) except KeyError: pass elif(d['subtype'] == 'CUSTOM'): m = self.material_custom(d) else: raise TypeError("Material '{}' {} is unknown type".format(d['name'], d['subtype'])) return m def get_material(self, n, ): """get material by name from scene, if material is missing, create and return placeholder""" def get_material_names(s): it = CmaxwellMaterialIterator() o = it.first(s) l = [] while not o.isNull(): name = o.getName() l.append(name) o = it.next() return l s = self.mxs names = get_material_names(s) m = None if(n in names): m = s.getMaterial(n) if(m is None): # should not happen because i stopped changing material names.. but i leave it here m = self.material_placeholder() return m class MXMEmitterCheck(): def __init__(self, path, ): if(__name__ != "__main__"): if(platform.system() == 'Darwin'): raise ImportError("No pymaxwell for Mac OS X..") log(self.__class__.__name__, 1, LogStyles.MESSAGE, prefix="* ", ) self.path = path self.mxs = Cmaxwell(mwcallback) self.emitter = False m = self.mxs.readMaterial(self.path) for i in range(m.getNumLayers()[0]): l = m.getLayer(i) e = l.getEmitter() if(e.isNull()): # no emitter layer self.emitter = False return if(not e.getState()[0]): # there is, but is disabled self.emitter = False return # is emitter self.emitter = True class MXSReader(): def __init__(self, path, ): if(__name__ != "__main__"): if(platform.system() == 'Darwin'): raise ImportError("No pymaxwell for Mac OS X..") log(self.__class__.__name__, 1, LogStyles.MESSAGE, prefix="* ", ) self.path = path self.mxs = Cmaxwell(mwcallback) log("loading {}".format(self.path), 2, prefix="* ", ) self.mxs.readMXS(self.path) if(self.mxs.isProtectionEnabled()): raise RuntimeError("Protected MXS") self._prepare() def _mxs_get_objects_names(self): s = self.mxs it = CmaxwellObjectIterator() o = it.first(s) l = [] while not o.isNull(): name, _ = o.getName() l.append(name) o = it.next() return l def _mxs_object(self, o): object_name, _ = o.getName() is_instance, _ = o.isInstance() is_mesh, _ = o.isMesh() if(is_instance == 0 and is_mesh == 0): log("{}: only empties, meshes and instances are supported..".format(object_name), 2, LogStyles.WARNING, ) return None # skip not posrotscale initialized objects is_init, _ = o.isPosRotScaleInitialized() if(not is_init): # log("{}: object is not initialized, skipping..".format(object_name), 2, LogStyles.WARNING, ) log("{}: object is not initialized..".format(object_name), 2, LogStyles.WARNING, ) # return None r = {'name': o.getName()[0], 'vertices': [], 'normals': [], 'triangles': [], 'trianglesUVW': [], 'matrix': (), 'parent': None, 'type': '', 'materials': [], 'nmats': 0, 'matnames': [], } if(is_instance == 1): io = o.getInstanced() ion = io.getName()[0] b, p = self._base_and_pivot(o) r = {'name': o.getName()[0], 'base': b, 'pivot': p, 'parent': None, 'type': 'INSTANCE', 'instanced': ion, } # no multi material instances, always one material per instance m, _ = o.getMaterial() if(m.isNull() == 
1): r['material'] = None else: r['material'] = m.getName() p, _ = o.getParent() if(not p.isNull()): r['parent'] = p.getName()[0] cid, _ = o.getColorID() rgb8 = cid.toRGB8() col = [str(rgb8.r()), str(rgb8.g()), str(rgb8.b())] r['colorid'] = ", ".join(col) h = [] if(o.getHideToCamera()): h.append("C") if(o.getHideToGI()): h.append("GI") if(o.getHideToReflectionsRefractions()): h.append("RR") r['hidden'] = ", ".join(h) r['referenced_mxs'] = False r['referenced_mxs_path'] = None rmp = io.getReferencedScenePath() if(rmp != ""): r['referenced_mxs'] = True r['referenced_mxs_path'] = rmp return r # counts nv, _ = o.getVerticesCount() nn, _ = o.getNormalsCount() nt, _ = o.getTrianglesCount() nppv, _ = o.getPositionsPerVertexCount() ppv = 0 r['referenced_mxs'] = False r['referenced_mxs_path'] = None if(nv > 0): r['type'] = 'MESH' cid, _ = o.getColorID() rgb8 = cid.toRGB8() col = [str(rgb8.r()), str(rgb8.g()), str(rgb8.b())] r['colorid'] = ", ".join(col) h = [] if(o.getHideToCamera()): h.append("C") if(o.getHideToGI()): h.append("GI") if(o.getHideToReflectionsRefractions()): h.append("RR") r['hidden'] = ", ".join(h) else: r['type'] = 'EMPTY' rmp = o.getReferencedScenePath() if(rmp != ""): r['referenced_mxs'] = True r['referenced_mxs_path'] = rmp cid, _ = o.getColorID() rgb8 = cid.toRGB8() col = [str(rgb8.r()), str(rgb8.g()), str(rgb8.b())] r['colorid'] = ", ".join(col) if(nppv - 1 != ppv and nv != 0): log("only one position per vertex is supported..", 2, LogStyles.WARNING, ) # vertices for i in range(nv): v, _ = o.getVertex(i, ppv) # (float x, float y, float z) r['vertices'].append((v.x(), v.y(), v.z())) # normals for i in range(nn): v, _ = o.getNormal(i, ppv) # (float x, float y, float z) r['normals'].append((v.x(), v.y(), v.z())) # triangles for i in range(nt): t = o.getTriangle(i) # (int v1, int v2, int v3, int n1, int n2, int n3) r['triangles'].append(t) # materials mats = [] for i in range(nt): m, _ = o.getTriangleMaterial(i) if(m.isNull() == 1): n = None else: n = m.getName() if(n not in mats): mats.append(n) r['materials'].append((i, n)) r['nmats'] = len(mats) r['matnames'] = mats # uv channels ncuv, _ = o.getChannelsUVWCount() for cuv in range(ncuv): # uv triangles r['trianglesUVW'].append([]) for i in range(nt): t = o.getTriangleUVW(i, cuv) # float u1, float v1, float w1, float u2, float v2, float w2, float u3, float v3, float w3 r['trianglesUVW'][cuv].append(t) # base and pivot to matrix b, p = self._base_and_pivot(o) r['base'] = b r['pivot'] = p # parent p, _ = o.getParent() if(not p.isNull()): r['parent'] = p.getName()[0] return r def _mxs_camera(self, c): v = c.getValues() v = {'name': v[0], 'nSteps': v[1], 'shutter': v[2], 'filmWidth': v[3], 'filmHeight': v[4], 'iso': v[5], 'pDiaphragmType': v[6], 'angle': v[7], 'nBlades': v[8], 'fps': v[9], 'xRes': v[10], 'yRes': v[11], 'pixelAspect': v[12], 'lensType': v[13], } s = c.getStep(0) o = s[0] f = s[1] u = s[2] # skip weird cameras flc = s[3] co = s[0] fp = s[1] d = Cvector() d.substract(fp, co) fd = d.norm() if(flc == 0.0 or fd == 0.0): log("{}: impossible camera, skipping..".format(v['name']), 2, LogStyles.WARNING) return None r = {'name': v['name'], 'shutter': 1.0 / v['shutter'], 'iso': v['iso'], 'x_res': v['xRes'], 'y_res': v['yRes'], 'pixel_aspect': v['pixelAspect'], 'origin': (o.x(), o.y(), o.z()), 'focal_point': (f.x(), f.y(), f.z()), 'up': (u.x(), u.y(), u.z()), 'focal_length': self._uncorrect_focal_length(s) * 1000.0, 'f_stop': s[4], 'film_width': round(v['filmWidth'] * 1000.0, 3), 'film_height': round(v['filmHeight'] * 1000.0, 3), 
'active': False, 'sensor_fit': None, 'shift_x': 0.0, 'shift_y': 0.0, 'zclip': False, 'zclip_near': 0.0, 'zclip_far': 1000000.0, 'type': 'CAMERA', } if(r['film_width'] > r['film_height']): r['sensor_fit'] = 'HORIZONTAL' else: r['sensor_fit'] = 'VERTICAL' cp = c.getCutPlanes() if(cp[2] is True): r['zclip'] = True r['zclip_near'] = cp[0] r['zclip_far'] = cp[1] sl = c.getShiftLens() r['shift_x'] = sl[0] r['shift_y'] = sl[1] d = c.getDiaphragm() r['diaphragm_type'] = d[0][0] r['diaphragm_angle'] = d[1] r['diaphragm_blades'] = d[2] return r def _base_and_pivot(self, o): b, p, _ = o.getBaseAndPivot() o = b.origin x = b.xAxis y = b.yAxis z = b.zAxis rb = [[o.x(), o.y(), o.z()], [x.x(), x.y(), x.z()], [y.x(), y.y(), y.z()], [z.x(), z.y(), z.z()]] rp = ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0), ) return rb, rp def _uncorrect_focal_length(self, step): flc = step[3] o = step[0] fp = step[1] d = Cvector() d.substract(fp, o) fd = d.norm() fluc = 1.0 / (1.0 / flc - 1 / fd) return fluc def _prepare(self): s = self.mxs self.object_names = self._mxs_get_objects_names() def _is_emitter(self, o): is_instance, _ = o.isInstance() is_mesh, _ = o.isMesh() if(not is_mesh and not is_instance): return False if(is_mesh): nt, _ = o.getTrianglesCount() mats = [] for i in range(nt): m, _ = o.getTriangleMaterial(i) if(not m.isNull()): if(m not in mats): mats.append(m) for m in mats: nl, _ = m.getNumLayers() for i in range(nl): l = m.getLayer(i) e = l.getEmitter() if(not e.isNull()): return True if(is_instance): m, _ = o.getMaterial() if(not m.isNull()): nl, _ = m.getNumLayers() for i in range(nl): l = m.getLayer(i) e = l.getEmitter() if(not e.isNull()): return True return False def _global_transform(self, o): cb, _ = o.getWorldTransform() o = cb.origin x = cb.xAxis y = cb.yAxis z = cb.zAxis rb = [[o.x(), o.y(), o.z()], [x.x(), x.y(), x.z()], [y.x(), y.y(), y.z()], [z.x(), z.y(), z.z()]] rp = ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0), ) return rb, rp def objects(self, only_emitters=False): if(only_emitters): s = self.mxs data = [] log("converting emitters..", 2) for n in self.object_names: d = None o = s.getObject(n) if(self._is_emitter(o)): d = self._mxs_object(o) if(d is not None): b, p = self._global_transform(o) d['base'] = b d['pivot'] = p d['parent'] = None data.append(d) else: s = self.mxs data = [] log("converting empties, meshes and instances..", 2) for n in self.object_names: d = None o = s.getObject(n) d = self._mxs_object(o) if(d is not None): data.append(d) return data def cameras(self): s = self.mxs data = [] log("converting cameras..", 2) nms = s.getCameraNames() cams = [] if(type(nms) == list): for n in nms: cams.append(s.getCamera(n)) for c in cams: d = self._mxs_camera(c) if(d is not None): data.append(d) # set active camera if(len(cams) > 1): # if there is just one camera, this behaves badly. # use it just when there are two or more cameras.. 
active_cam = s.getActiveCamera() active_cam_name = active_cam.getName() for o in data: if(o['type'] == 'CAMERA'): if(o['name'] == active_cam_name): o['active'] = True else: for o in data: if(o['type'] == 'CAMERA'): o['active'] = True return data def sun(self): s = self.mxs data = [] env = s.getEnvironment() if(env.getSunProperties()[0] == 1): log("converting sun..", 2) if(env.getSunPositionType() == 2): v, _ = env.getSunDirection() else: v, _ = env.getSunDirectionUsedForRendering() d = {'name': "The Sun", 'xyz': (v.x(), v.y(), v.z()), 'type': 'SUN', } data.append(d) return data class MXSSceneWrapper(): def __init__(self, load_extensions=True, ): if(__name__ != "__main__"): if(platform.system() == 'Darwin'): raise ImportError("No pymaxwell directly in Blender on Mac OS X..") log(self.__class__.__name__, 1, LogStyles.MESSAGE, prefix="* ", ) log("creating new scene..", 2, prefix="* ", ) self.mxs = Cmaxwell(mwcallback) pid = utils.get_plugin_id() if(pid != ""): # write here directly, even though it is also part of scene data, but api change just for this is pointless.. self.mxs.setPluginID(pid) self.mgr = None if(load_extensions): log("loading extensions..", 2, prefix="* ", ) self.mgr = CextensionManager.instance() self.mgr.loadAllExtensions() class MXMReader(): def __init__(self, mxm_path, ): def texture(t): if(t is None): return None if(t.isEmpty()): return None d = {'path': t.getPath(), 'use_global_map': t.useGlobalMap, 'channel': t.uvwChannelID, 'brightness': t.brightness * 100, 'contrast': t.contrast * 100, 'saturation': t.saturation * 100, 'hue': t.hue * 180, 'rotation': t.rotation, 'invert': t.invert, 'interpolation': t.typeInterpolation, 'use_alpha': t.useAlpha, 'repeat': [t.scale.x(), t.scale.y()], 'mirror': [t.uIsMirrored, t.vIsMirrored], 'offset': [t.offset.x(), t.offset.y()], 'clamp': [int(t.clampMin * 255), int(t.clampMax * 255)], 'tiling_units': t.useAbsoluteUnits, 'tiling_method': [t.uIsTiled, t.vIsTiled], 'normal_mapping_flip_red': t.normalMappingFlipRed, 'normal_mapping_flip_green': t.normalMappingFlipGreen, 'normal_mapping_full_range_blue': t.normalMappingFullRangeBlue, } # t.cosA # t.doGammaCorrection # t.sinA # t.theTextureExtensions d['procedural'] = [] if(t.hasProceduralTextures()): n = t.getProceduralTexturesCount() for i in range(n): pd = extension(None, None, t, i) d['procedural'].append(pd) return d def material(s, m): data = {} if(m.isNull()): return data # defaults bsdfd = {'visible': True, 'weight': 100.0, 'weight_map_enabled': False, 'weight_map': None, 'ior': 0, 'complex_ior': "", 'reflectance_0': (0.6, 0.6, 0.6, ), 'reflectance_0_map_enabled': False, 'reflectance_0_map': None, 'reflectance_90': (1.0, 1.0, 1.0, ), 'reflectance_90_map_enabled': False, 'reflectance_90_map': None, 'transmittance': (0.0, 0.0, 0.0), 'transmittance_map_enabled': False, 'transmittance_map': None, 'attenuation': 1.0, 'attenuation_units': 0, 'nd': 3.0, 'force_fresnel': False, 'k': 0.0, 'abbe': 1.0, 'r2_enabled': False, 'r2_falloff_angle': 75.0, 'r2_influence': 0.0, 'roughness': 100.0, 'roughness_map_enabled': False, 'roughness_map': None, 'bump': 30.0, 'bump_map_enabled': False, 'bump_map': None, 'bump_map_use_normal': False, 'bump_normal': 100.0, 'anisotropy': 0.0, 'anisotropy_map_enabled': False, 'anisotropy_map': None, 'anisotropy_angle': 0.0, 'anisotropy_angle_map_enabled': False, 'anisotropy_angle_map': None, 'scattering': (0.5, 0.5, 0.5, ), 'coef': 0.0, 'asymmetry': 0.0, 'single_sided': False, 'single_sided_value': 1.0, 'single_sided_map_enabled': False, 'single_sided_map': None, 
'single_sided_min': 0.001, 'single_sided_max': 10.0, } coatingd = {'enabled': False, 'thickness': 500.0, 'thickness_map_enabled': False, 'thickness_map': None, 'thickness_map_min': 100.0, 'thickness_map_max': 1000.0, 'ior': 0, 'complex_ior': "", 'reflectance_0': (0.6, 0.6, 0.6, ), 'reflectance_0_map_enabled': False, 'reflectance_0_map': None, 'reflectance_90': (1.0, 1.0, 1.0, ), 'reflectance_90_map_enabled': False, 'reflectance_90_map': None, 'nd': 3.0, 'force_fresnel': False, 'k': 0.0, 'r2_enabled': False, 'r2_falloff_angle': 75.0, } displacementd = {'enabled': False, 'map': None, 'type': 1, 'subdivision': 5, 'adaptive': False, 'subdivision_method': 0, 'offset': 0.5, 'smoothing': True, 'uv_interpolation': 2, 'height': 2.0, 'height_units': 0, 'v3d_preset': 0, 'v3d_transform': 0, 'v3d_rgb_mapping': 0, 'v3d_scale': (1.0, 1.0, 1.0), } emitterd = {'enabled': False, 'type': 0, 'ies_data': "", 'ies_intensity': 1.0, 'spot_map_enabled': False, 'spot_map': "", 'spot_cone_angle': 45.0, 'spot_falloff_angle': 10.0, 'spot_falloff_type': 0, 'spot_blur': 1.0, 'emission': 0, 'color': (1.0, 1.0, 1.0, ), 'color_black_body_enabled': False, 'color_black_body': 6500.0, 'luminance': 0, 'luminance_power': 40.0, 'luminance_efficacy': 17.6, 'luminance_output': 100.0, 'temperature_value': 6500.0, 'hdr_map': None, 'hdr_intensity': 1.0, } layerd = {'visible': True, 'opacity': 100.0, 'opacity_map_enabled': False, 'opacity_map': None, 'blending': 0, } globald = {'override_map': None, 'bump': 30.0, 'bump_map_enabled': False, 'bump_map': None, 'bump_map_use_normal': False, 'bump_normal': 100.0, 'dispersion': False, 'shadow': False, 'matte': False, 'priority': 0, 'id': (0.0, 0.0, 0.0), 'active_display_map': None, } # structure structure = [] nl, _ = m.getNumLayers() for i in range(nl): l = m.getLayer(i) ln, _ = l.getName() nb, _ = l.getNumBSDFs() bs = [] for j in range(nb): b = l.getBSDF(j) bn = b.getName() bs.append([bn, b]) ls = [ln, l, bs] structure.append(ls) # default data data['global_props'] = globald.copy() data['displacement'] = displacementd.copy() data['layers'] = [] for i, sl in enumerate(structure): bsdfs = [] for j, sb in enumerate(sl[2]): bsdfs.append({'name': sb[0], 'bsdf_props': bsdfd.copy(), 'coating': coatingd.copy(), }) layer = {'name': sl[0], 'layer_props': layerd.copy(), 'bsdfs': bsdfs, 'emitter': emitterd.copy(), } data['layers'].append(layer) # custom data def global_props(m, d): t, _ = m.getGlobalMap() d['override_map'] = texture(t) a, _ = m.getAttribute('bump') if(a.activeType == MAP_TYPE_BITMAP): d['bump_map_enabled'] = True d['bump_map'] = texture(a.textureMap) d['bump_map_use_normal'] = m.getNormalMapState()[0] if(d['bump_map_use_normal']): d['bump_normal'] = a.value else: d['bump'] = a.value else: d['bump_map_enabled'] = False d['bump_map'] = None d['bump_map_use_normal'] = m.getNormalMapState()[0] if(d['bump_map_use_normal']): d['bump_normal'] = a.value else: d['bump'] = a.value d['dispersion'] = m.getDispersion()[0] d['shadow'] = m.getMatteShadow()[0] d['matte'] = m.getMatte()[0] d['priority'] = m.getNestedPriority()[0] c, _ = m.getColorID() d['id'] = [c.r(), c.g(), c.b()] return d data['global_props'] = global_props(m, data['global_props']) def displacement(m, d): if(not m.isDisplacementEnabled()[0]): return d d['enabled'] = True t, _ = m.getDisplacementMap() d['map'] = texture(t) displacementType, subdivisionLevel, smoothness, offset, subdivisionType, interpolationUvType, minLOD, maxLOD, _ = m.getDisplacementCommonParameters() height, absoluteHeight, adaptive, _ = 
m.getHeightMapDisplacementParameters() scale, transformType, mapping, preset, _ = m.getVectorDisplacementParameters() d['type'] = displacementType d['subdivision'] = subdivisionLevel d['adaptive'] = adaptive d['subdivision_method'] = subdivisionType d['offset'] = offset d['smoothing'] = bool(smoothness) d['uv_interpolation'] = interpolationUvType d['height'] = height d['height_units'] = absoluteHeight d['v3d_preset'] = preset d['v3d_transform'] = transformType d['v3d_rgb_mapping'] = mapping d['v3d_scale'] = (scale.x(), scale.y(), scale.z(), ) return d data['displacement'] = displacement(m, data['displacement']) def cattribute_rgb(a): if(a.activeType == MAP_TYPE_BITMAP): c = (a.rgb.r(), a.rgb.g(), a.rgb.b()) e = True m = texture(a.textureMap) else: c = (a.rgb.r(), a.rgb.g(), a.rgb.b()) e = False m = None return c, e, m def cattribute_value(a): if(a.activeType == MAP_TYPE_BITMAP): v = a.value e = True m = texture(a.textureMap) else: v = a.value e = False m = None return v, e, m def layer_props(l, d): d['visible'] = l.getEnabled()[0] d['blending'] = l.getStackedBlendingMode()[0] a, _ = l.getAttribute('weight') if(a.activeType == MAP_TYPE_BITMAP): d['opacity'] = a.value d['opacity_map_enabled'] = True d['opacity_map'] = texture(a.textureMap) else: d['opacity'] = a.value d['opacity_map_enabled'] = False d['opacity_map'] = None return d def emitter(l, d): e = l.getEmitter() if(e.isNull()): d['enabled'] = False return d d['enabled'] = True d['type'] = e.getLobeType()[0] d['ies_data'] = e.getLobeIES() d['ies_intensity'] = e.getIESLobeIntensity()[0] t, _ = e.getLobeImageProjectedMap() d['spot_map_enabled'] = (not t.isEmpty()) d['spot_map'] = texture(t) d['spot_cone_angle'] = e.getSpotConeAngle()[0] d['spot_falloff_angle'] = e.getSpotFallOffAngle()[0] d['spot_falloff_type'] = e.getSpotFallOffType()[0] d['spot_blur'] = e.getSpotBlur()[0] d['emission'] = e.getActiveEmissionType()[0] ep, _ = e.getPair() colorType, units, _ = e.getActivePair() d['color'] = (ep.rgb.r(), ep.rgb.g(), ep.rgb.b(), ) d['color_black_body'] = ep.temperature d['luminance'] = units if(units == EMISSION_UNITS_WATTS_AND_LUMINOUS_EFFICACY): d['luminance_power'] = ep.watts d['luminance_efficacy'] = ep.luminousEfficacy elif(units == EMISSION_UNITS_LUMINOUS_POWER): d['luminance_output'] = ep.luminousPower elif(units == EMISSION_UNITS_ILLUMINANCE): d['luminance_output'] = ep.illuminance elif(units == EMISSION_UNITS_LUMINOUS_INTENSITY): d['luminance_output'] = ep.luminousIntensity elif(units == EMISSION_UNITS_LUMINANCE): d['luminance_output'] = ep.luminance if(colorType == EMISSION_COLOR_TEMPERATURE): d['color_black_body_enabled'] = True d['temperature_value'] = e.getTemperature()[0] a, _ = e.getMXI() if(a.activeType == MAP_TYPE_BITMAP): d['hdr_map'] = texture(a.textureMap) d['hdr_intensity'] = a.value else: d['hdr_map'] = None d['hdr_intensity'] = a.value return d def bsdf_props(b, d): d['visible'] = b.getState()[0] a, _ = b.getWeight() if(a.activeType == MAP_TYPE_BITMAP): d['weight_map_enabled'] = True d['weight'] = a.value d['weight_map'] = texture(a.textureMap) else: d['weight_map_enabled'] = False d['weight'] = a.value d['weight_map'] = None r = b.getReflectance() d['ior'] = r.getActiveIorMode()[0] d['complex_ior'] = r.getComplexIor() d['reflectance_0'], d['reflectance_0_map_enabled'], d['reflectance_0_map'] = cattribute_rgb(r.getAttribute('color')[0]) d['reflectance_90'], d['reflectance_90_map_enabled'], d['reflectance_90_map'] = cattribute_rgb(r.getAttribute('color.tangential')[0]) d['transmittance'], 
d['transmittance_map_enabled'], d['transmittance_map'] = cattribute_rgb(r.getAttribute('transmittance.color')[0]) d['attenuation_units'], d['attenuation'] = r.getAbsorptionDistance() d['nd'], d['abbe'], _ = r.getIOR() d['force_fresnel'], _ = r.getForceFresnel() d['k'], _ = r.getConductor() d['r2_falloff_angle'], d['r2_influence'], d['r2_enabled'], _ = r.getFresnelCustom() d['roughness'], d['roughness_map_enabled'], d['roughness_map'] = cattribute_value(b.getAttribute('roughness')[0]) d['bump_map_use_normal'] = b.getNormalMapState()[0] if(d['bump_map_use_normal']): d['bump_normal'], d['bump_map_enabled'], d['bump_map'] = cattribute_value(b.getAttribute('bump')[0]) else: d['bump'], d['bump_map_enabled'], d['bump_map'] = cattribute_value(b.getAttribute('bump')[0]) d['anisotropy'], d['anisotropy_map_enabled'], d['anisotropy_map'] = cattribute_value(b.getAttribute('anisotropy')[0]) d['anisotropy_angle'], d['anisotropy_angle_map_enabled'], d['anisotropy_angle_map'] = cattribute_value(b.getAttribute('angle')[0]) a, _ = r.getAttribute('scattering') d['scattering'] = (a.rgb.r(), a.rgb.g(), a.rgb.b(), ) d['coef'], d['asymmetry'], d['single_sided'], _ = r.getScatteringParameters() d['single_sided_value'], d['single_sided_map_enabled'], d['single_sided_map'] = cattribute_value(r.getScatteringThickness()[0]) d['single_sided_min'], d['single_sided_max'], _ = r.getScatteringThicknessRange() return d def coating(b, d): nc, _ = b.getNumCoatings() if(nc > 0): c = b.getCoating(0) else: d['enabled'] = False return d d['enabled'] = True d['thickness'], d['thickness_map_enabled'], d['thickness_map'] = cattribute_value(c.getThickness()[0]) d['thickness_map_min'], d['thickness_map_max'], _ = c.getThicknessRange() r = c.getReflectance() d['ior'] = r.getActiveIorMode()[0] d['complex_ior'] = r.getComplexIor() d['reflectance_0'], d['reflectance_0_map_enabled'], d['reflectance_0_map'] = cattribute_rgb(r.getAttribute('color')[0]) d['reflectance_90'], d['reflectance_90_map_enabled'], d['reflectance_90_map'] = cattribute_rgb(r.getAttribute('color.tangential')[0]) d['nd'], _, _ = r.getIOR() d['force_fresnel'], _ = r.getForceFresnel() d['k'], _ = r.getConductor() d['r2_falloff_angle'], _, d['r2_enabled'], _ = r.getFresnelCustom() return d for i, sl in enumerate(structure): l = sl[1] data['layers'][i]['layer_props'] = layer_props(l, data['layers'][i]['layer_props']) data['layers'][i]['emitter'] = emitter(l, data['layers'][i]['emitter']) for j, bs in enumerate(sl[2]): b = bs[1] data['layers'][i]['bsdfs'][j]['bsdf_props'] = bsdf_props(b, data['layers'][i]['bsdfs'][j]['bsdf_props']) data['layers'][i]['bsdfs'][j]['coating'] = coating(b, data['layers'][i]['bsdfs'][j]['coating']) return data def extension(s, m, pt=None, pi=None, ): def texture(t): if(t is None): return None if(t.isEmpty()): return None d = {'path': t.getPath(), 'use_global_map': t.useGlobalMap, 'channel': t.uvwChannelID, 'brightness': t.brightness * 100, 'contrast': t.contrast * 100, 'saturation': t.saturation * 100, 'hue': t.hue * 180, 'rotation': t.rotation, 'invert': t.invert, 'interpolation': t.typeInterpolation, 'use_alpha': t.useAlpha, 'repeat': [t.scale.x(), t.scale.y()], 'mirror': [t.uIsMirrored, t.vIsMirrored], 'offset': [t.offset.x(), t.offset.y()], 'clamp': [int(t.clampMin * 255), int(t.clampMax * 255)], 'tiling_units': t.useAbsoluteUnits, 'tiling_method': [t.uIsTiled, t.vIsTiled], 'normal_mapping_flip_red': t.normalMappingFlipRed, 'normal_mapping_flip_green': t.normalMappingFlipGreen, 'normal_mapping_full_range_blue': t.normalMappingFullRangeBlue, } 
return d def mxparamlistarray(v): return None def rgb(v): return (v.r(), v.g(), v.b()) if(pt is not None and pi is not None): params = pt.getProceduralTexture(pi) else: params, _ = m.getMaterialModifierExtensionParams() types = [(0, 'UCHAR', params.getByte, ), (1, 'UINT', params.getUInt, ), (2, 'INT', params.getInt, ), (3, 'FLOAT', params.getFloat, ), (4, 'DOUBLE', params.getDouble, ), (5, 'STRING', params.getString, ), (6, 'FLOATARRAY', params.getFloatArray, ), (7, 'DOUBLEARRAY', params.getDoubleArray, ), (8, 'BYTEARRAY', params.getByteArray, ), (9, 'INTARRAY', params.getIntArray, ), (10, 'MXPARAMLIST', params.getTextureMap, ), (11, 'MXPARAMLISTARRAY', mxparamlistarray, ), (12, 'RGB', params.getRgb, ), ] d = {} for i in range(params.getNumItems()): name, data, _, _, data_type, _, data_count, _ = params.getByIndex(i) _, _, f = types[data_type] k = name if(data_type not in [10, 11, 12]): v, _ = f(name) else: if(data_type == 10): v = texture(f(name)[0]) elif(data_type == 11): pass elif(data_type == 12): v = rgb(f(name)[0]) d[k] = v return d log("{0} {1} {0}".format("-" * 30, self.__class__.__name__), 0, LogStyles.MESSAGE, prefix="", ) log("path: {}".format(mxm_path), 1, LogStyles.MESSAGE) s = Cmaxwell(mwcallback) m = s.readMaterial(mxm_path) self.data = material(s, m) if(m.hasMaterialModifier()): self.data['extension'] = extension(s, m) class MXSReferenceReader(): def __init__(self, path, ): log("maxwell meshes to data:", 1) log("reading mxs scene from: {0}".format(path), 2) scene = Cmaxwell(mwcallback) ok = scene.readMXS(path) if(not ok): raise RuntimeError("Error during reading scene {}".format(path)) nms = self.get_objects_names(scene) data = [] log("reading meshes..", 2) for n in nms: d = None o = scene.getObject(n) if(not o.isNull()): if(o.isMesh()[0] == 1 and o.isInstance()[0] == 0): d = self.object(o) if(d is not None): data.append(d) log("reading instances..", 2) for n in nms: d = None o = scene.getObject(n) if(not o.isNull()): if(o.isMesh()[0] == 0 and o.isInstance()[0] == 1): io = o.getInstanced() ion = io.getName()[0] for a in data: if(a['name'] == ion): b, p = self.global_transform(o) d = {'name': o.getName()[0], 'base': b, 'pivot': p, 'vertices': a['vertices'][:], } if(d is not None): data.append(d) self.data = data log("done.", 2) def get_objects_names(self, scene): it = CmaxwellObjectIterator() o = it.first(scene) l = [] while not o.isNull(): name, _ = o.getName() l.append(name) o = it.next() return l def object(self, o): is_instance, _ = o.isInstance() is_mesh, _ = o.isMesh() if(is_instance == 0 and is_mesh == 0): return None def get_verts(o): vs = [] nv, _ = o.getVerticesCount() for i in range(nv): v, _ = o.getVertex(i, 0) vs.append((v.x(), v.y(), v.z())) return vs b, p = self.global_transform(o) r = {'name': o.getName()[0], 'base': b, 'pivot': p, 'vertices': [], } if(is_instance == 1): io = o.getInstanced() r['vertices'] = get_verts(io) else: r['vertices'] = get_verts(o) return r def global_transform(self, o): cb, _ = o.getWorldTransform() o = cb.origin x = cb.xAxis y = cb.yAxis z = cb.zAxis rb = [[o.x(), o.y(), o.z()], [x.x(), x.y(), x.z()], [y.x(), y.y(), y.z()], [z.x(), z.y(), z.z()]] rp = ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0), ) return rb, rp
gpl-2.0
albertomurillo/ansible
lib/ansible/modules/network/aci/aci_firmware_source.py
27
7594
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2018, Dag Wieers (dagwieers) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'certified'}

DOCUMENTATION = r'''
---
module: aci_firmware_source
short_description: Manage firmware image sources (firmware:OSource)
description:
- Manage firmware image sources on Cisco ACI fabrics.
version_added: '2.5'
options:
  source:
    description:
    - The identifying name for the outside source of images, such as an HTTP or SCP server.
    type: str
    required: yes
    aliases: [ name, source_name ]
  polling_interval:
    description:
    - Polling interval in minutes.
    type: int
  url_protocol:
    description:
    - The Firmware download protocol.
    type: str
    choices: [ http, local, scp, usbkey ]
    default: scp
    aliases: [ url_proto ]
  url:
    description: The firmware URL for the image(s) on the source.
    type: str
  url_password:
    description: The Firmware password or key string.
    type: str
  url_username:
    description: The username for the source.
    type: str
  state:
    description:
    - Use C(present) or C(absent) for adding or removing.
    - Use C(query) for listing an object or multiple objects.
    type: str
    choices: [ absent, present, query ]
    default: present
extends_documentation_fragment: aci
seealso:
- name: APIC Management Information Model reference
  description: More information about the internal APIC class B(firmware:OSource).
  link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Dag Wieers (@dagwieers)
'''

EXAMPLES = r'''
- name: Add firmware source
  aci_firmware_source:
    host: apic
    username: admin
    password: SomeSecretPassword
    source: aci-msft-pkg-3.1.1i.zip
    url: foo.bar.cisco.com/download/cisco/aci/aci-msft-pkg-3.1.1i.zip
    url_protocol: http
    state: present
  delegate_to: localhost

- name: Remove firmware source
  aci_firmware_source:
    host: apic
    username: admin
    password: SomeSecretPassword
    source: aci-msft-pkg-3.1.1i.zip
    state: absent
  delegate_to: localhost

- name: Query a specific firmware source
  aci_firmware_source:
    host: apic
    username: admin
    password: SomeSecretPassword
    source: aci-msft-pkg-3.1.1i.zip
    state: query
  delegate_to: localhost
  register: query_result

- name: Query all firmware sources
  aci_firmware_source:
    host: apic
    username: admin
    password: SomeSecretPassword
    state: query
  delegate_to: localhost
  register: query_result
'''

RETURN = r'''
current:
  description: The existing configuration from the APIC after the module has finished
  returned: success
  type: list
  sample:
    [
        {
            "fvTenant": {
                "attributes": {
                    "descr": "Production environment",
                    "dn": "uni/tn-production",
                    "name": "production",
                    "nameAlias": "",
                    "ownerKey": "",
                    "ownerTag": ""
                }
            }
        }
    ]
error:
  description: The error information as returned from the APIC
  returned: failure
  type: dict
  sample:
    {
        "code": "122",
        "text": "unknown managed object class foo"
    }
raw:
  description: The raw output returned by the APIC REST API (xml or json)
  returned: parse error
  type: str
  sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
  description: The actual/minimal configuration pushed to the APIC
  returned: info
  type: list
  sample:
    {
        "fvTenant": {
            "attributes": {
                "descr": "Production environment"
            }
        }
    }
previous:
  description: The original configuration from the APIC before the module has started
  returned: info
  type: list
  sample:
    [
        {
            "fvTenant": {
                "attributes": {
                    "descr": "Production",
                    "dn": "uni/tn-production",
                    "name": "production",
                    "nameAlias": "",
                    "ownerKey": "",
                    "ownerTag": ""
                }
            }
        }
    ]
proposed:
  description: The assembled configuration from the user-provided parameters
  returned: info
  type: dict
  sample:
    {
        "fvTenant": {
            "attributes": {
                "descr": "Production environment",
                "name": "production"
            }
        }
    }
filter_string:
  description: The filter string used for the request
  returned: failure or debug
  type: str
  sample: ?rsp-prop-include=config-only
method:
  description: The HTTP method used for the request to the APIC
  returned: failure or debug
  type: str
  sample: POST
response:
  description: The HTTP response from the APIC
  returned: failure or debug
  type: str
  sample: OK (30 bytes)
status:
  description: The HTTP status from the APIC
  returned: failure or debug
  type: int
  sample: 200
url:
  description: The HTTP url used for the request to the APIC
  returned: failure or debug
  type: str
  sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec


def main():
    argument_spec = aci_argument_spec()
    argument_spec.update(
        source=dict(type='str', aliases=['name', 'source_name']),  # Not required for querying all objects
        polling_interval=dict(type='int'),
        url=dict(type='str'),
        url_username=dict(type='str'),
        url_password=dict(type='str', no_log=True),
        url_protocol=dict(type='str', default='scp', choices=['http', 'local', 'scp', 'usbkey'], aliases=['url_proto']),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['source']],
            ['state', 'present', ['url_protocol', 'source', 'url']],
        ],
    )

    polling_interval = module.params['polling_interval']
    url_protocol = module.params['url_protocol']
    state = module.params['state']
    source = module.params['source']
    url = module.params['url']
    url_password = module.params['url_password']
    url_username = module.params['url_username']

    aci = ACIModule(module)
    aci.construct_url(
        root_class=dict(
            aci_class='firmwareOSource',
            aci_rn='fabric/fwrepop',
            module_object=source,
            target_filter={'name': source},
        ),
    )

    aci.get_existing()

    if state == 'present':
        aci.payload(
            aci_class='firmwareOSource',
            class_config=dict(
                name=source,
                url=url,
                password=url_password,
                pollingInterval=polling_interval,
                proto=url_protocol,
                user=url_username,
            ),
        )

        aci.get_diff(aci_class='firmwareOSource')

        aci.post_config()

    elif state == 'absent':
        aci.delete_config()

    aci.exit_json()


if __name__ == "__main__":
    main()
gpl-3.0
mistercrunch/panoramix
superset/views/base_api.py
2
21953
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import functools import logging from typing import Any, Callable, cast, Dict, List, Optional, Set, Tuple, Type, Union from apispec import APISpec from apispec.exceptions import DuplicateComponentNameError from flask import Blueprint, g, Response from flask_appbuilder import AppBuilder, Model, ModelRestApi from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.models.filters import BaseFilter, Filters from flask_appbuilder.models.sqla.filters import FilterStartsWith from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_babel import lazy_gettext as _ from marshmallow import fields, Schema from sqlalchemy import and_, distinct, func from sqlalchemy.orm.query import Query from superset.extensions import db, event_logger, security_manager from superset.models.core import FavStar from superset.models.dashboard import Dashboard from superset.models.slice import Slice from superset.schemas import error_payload_content from superset.sql_lab import Query as SqllabQuery from superset.stats_logger import BaseStatsLogger from superset.typing import FlaskResponse from superset.utils.core import time_function logger = logging.getLogger(__name__) get_related_schema = { "type": "object", "properties": { "page_size": {"type": "integer"}, "page": {"type": "integer"}, "include_ids": {"type": "array", "items": {"type": "integer"}}, "filter": {"type": "string"}, }, } class RelatedResultResponseSchema(Schema): value = fields.Integer(description="The related item identifier") text = fields.String(description="The related item string representation") class RelatedResponseSchema(Schema): count = fields.Integer(description="The total number of related values") result = fields.List(fields.Nested(RelatedResultResponseSchema)) class DistinctResultResponseSchema(Schema): text = fields.String(description="The distinct item") class DistincResponseSchema(Schema): count = fields.Integer(description="The total number of distinct values") result = fields.List(fields.Nested(DistinctResultResponseSchema)) def statsd_metrics(f: Callable[..., Any]) -> Callable[..., Any]: """ Handle sending all statsd metrics from the REST API """ def wraps(self: "BaseSupersetModelRestApi", *args: Any, **kwargs: Any) -> Response: try: duration, response = time_function(f, self, *args, **kwargs) except Exception as ex: self.incr_stats("error", f.__name__) raise ex self.send_stats_metrics(response, f.__name__, duration) return response return functools.update_wrapper(wraps, f) class RelatedFieldFilter: # data class to specify what filter to use on a /related endpoint # pylint: disable=too-few-public-methods def __init__(self, field_name: str, filter_class: Type[BaseFilter]): self.field_name = field_name self.filter_class = filter_class 
class BaseFavoriteFilter(BaseFilter):  # pylint: disable=too-few-public-methods
    """
    Base Custom filter for the GET list that filters all dashboards, slices
    that a user has favored or not
    """

    name = _("Is favorite")
    arg_name = ""
    class_name = ""
    """ The FavStar class_name to use """
    model: Type[Union[Dashboard, Slice, SqllabQuery]] = Dashboard
    """ The SQLAlchemy model """

    def apply(self, query: Query, value: Any) -> Query:
        # If anonymous user filter nothing
        if security_manager.current_user is None:
            return query
        users_favorite_query = db.session.query(FavStar.obj_id).filter(
            and_(
                FavStar.user_id == g.user.get_id(),
                FavStar.class_name == self.class_name,
            )
        )
        if value:
            return query.filter(and_(self.model.id.in_(users_favorite_query)))
        return query.filter(and_(~self.model.id.in_(users_favorite_query)))


class BaseSupersetModelRestApi(ModelRestApi):
    """
    Extends FAB's ModelRestApi to implement specific superset generic functionality
    """

    csrf_exempt = False
    method_permission_name = {
        "bulk_delete": "delete",
        "data": "list",
        "data_from_cache": "list",
        "delete": "delete",
        "distinct": "list",
        "export": "mulexport",
        "import_": "add",
        "get": "show",
        "get_list": "list",
        "info": "list",
        "post": "add",
        "put": "edit",
        "refresh": "edit",
        "related": "list",
        "related_objects": "list",
        "schemas": "list",
        "select_star": "list",
        "table_metadata": "list",
        "test_connection": "post",
        "thumbnail": "list",
        "viz_types": "list",
    }

    order_rel_fields: Dict[str, Tuple[str, str]] = {}
    """
    Impose ordering on related fields query::

        order_rel_fields = {
            "<RELATED_FIELD>": ("<RELATED_FIELD_FIELD>", "<asc|desc>"),
            ...
        }
    """  # pylint: disable=pointless-string-statement
    related_field_filters: Dict[str, Union[RelatedFieldFilter, str]] = {}
    """
    Declare the filters for related fields::

        related_field_filters = {
            "<RELATED_FIELD>": <RelatedFieldFilter>
        }
    """  # pylint: disable=pointless-string-statement
    filter_rel_fields: Dict[str, BaseFilter] = {}
    """
    Declare the related field base filter::

        filter_rel_fields = {
            "<RELATED_FIELD>": "<FILTER>"
        }
    """  # pylint: disable=pointless-string-statement
    allowed_rel_fields: Set[str] = set()
    """
    Declare a set of allowed related fields that the `related` endpoint supports
    """  # pylint: disable=pointless-string-statement

    text_field_rel_fields: Dict[str, str] = {}
    """
    Declare an alternative for the human readable representation of the Model object::

        text_field_rel_fields = {
            "<RELATED_FIELD>": "<RELATED_OBJECT_FIELD>"
        }
    """  # pylint: disable=pointless-string-statement

    allowed_distinct_fields: Set[str] = set()

    openapi_spec_component_schemas: Tuple[Type[Schema], ...]
= tuple() """ Add extra schemas to the OpenAPI component schemas section """ # pylint: disable=pointless-string-statement add_columns: List[str] edit_columns: List[str] list_columns: List[str] show_columns: List[str] responses = { "400": {"description": "Bad request", "content": error_payload_content}, "401": {"description": "Unauthorized", "content": error_payload_content}, "403": {"description": "Forbidden", "content": error_payload_content}, "404": {"description": "Not found", "content": error_payload_content}, "422": { "description": "Could not process entity", "content": error_payload_content, }, "500": {"description": "Fatal error", "content": error_payload_content}, } def __init__(self) -> None: # Setup statsd self.stats_logger = BaseStatsLogger() # Add base API spec base query parameter schemas if self.apispec_parameter_schemas is None: # type: ignore self.apispec_parameter_schemas = {} self.apispec_parameter_schemas["get_related_schema"] = get_related_schema if self.openapi_spec_component_schemas is None: self.openapi_spec_component_schemas = () self.openapi_spec_component_schemas = self.openapi_spec_component_schemas + ( RelatedResponseSchema, DistincResponseSchema, ) super().__init__() def add_apispec_components(self, api_spec: APISpec) -> None: """ Adds extra OpenApi schema spec components, these are declared on the `openapi_spec_component_schemas` class property """ for schema in self.openapi_spec_component_schemas: try: api_spec.components.schema( schema.__name__, schema=schema, ) except DuplicateComponentNameError: pass super().add_apispec_components(api_spec) def create_blueprint( self, appbuilder: AppBuilder, *args: Any, **kwargs: Any ) -> Blueprint: self.stats_logger = self.appbuilder.get_app.config["STATS_LOGGER"] return super().create_blueprint(appbuilder, *args, **kwargs) def _init_properties(self) -> None: model_id = self.datamodel.get_pk_name() if self.list_columns is None and not self.list_model_schema: self.list_columns = [model_id] if self.show_columns is None and not self.show_model_schema: self.show_columns = [model_id] if self.edit_columns is None and not self.edit_model_schema: self.edit_columns = [model_id] if self.add_columns is None and not self.add_model_schema: self.add_columns = [model_id] super()._init_properties() def _get_related_filter( self, datamodel: SQLAInterface, column_name: str, value: str ) -> Filters: filter_field = self.related_field_filters.get(column_name) if isinstance(filter_field, str): filter_field = RelatedFieldFilter(cast(str, filter_field), FilterStartsWith) filter_field = cast(RelatedFieldFilter, filter_field) search_columns = [filter_field.field_name] if filter_field else None filters = datamodel.get_filters(search_columns) base_filters = self.filter_rel_fields.get(column_name) if base_filters: filters.add_filter_list(base_filters) if value and filter_field: filters.add_filter( filter_field.field_name, filter_field.filter_class, value ) return filters def _get_distinct_filter(self, column_name: str, value: str) -> Filters: filter_field = RelatedFieldFilter(column_name, FilterStartsWith) filter_field = cast(RelatedFieldFilter, filter_field) search_columns = [filter_field.field_name] if filter_field else None filters = self.datamodel.get_filters(search_columns) filters.add_filter_list(self.base_filters) if value and filter_field: filters.add_filter( filter_field.field_name, filter_field.filter_class, value ) return filters def _get_text_for_model(self, model: Model, column_name: str) -> str: if column_name in 
self.text_field_rel_fields: model_column_name = self.text_field_rel_fields.get(column_name) if model_column_name: return getattr(model, model_column_name) return str(model) def _get_result_from_rows( self, datamodel: SQLAInterface, rows: List[Model], column_name: str ) -> List[Dict[str, Any]]: return [ { "value": datamodel.get_pk_value(row), "text": self._get_text_for_model(row, column_name), } for row in rows ] def _add_extra_ids_to_result( self, datamodel: SQLAInterface, column_name: str, ids: List[int], result: List[Dict[str, Any]], ) -> None: if ids: # Filter out already present values on the result values = [row["value"] for row in result] ids = [id_ for id_ in ids if id_ not in values] pk_col = datamodel.get_pk() # Fetch requested values from ids extra_rows = db.session.query(datamodel.obj).filter(pk_col.in_(ids)).all() result += self._get_result_from_rows(datamodel, extra_rows, column_name) def incr_stats(self, action: str, func_name: str) -> None: """ Proxy function for statsd.incr to impose a key structure for REST API's :param action: String with an action name eg: error, success :param func_name: The function name """ self.stats_logger.incr(f"{self.__class__.__name__}.{func_name}.{action}") def timing_stats(self, action: str, func_name: str, value: float) -> None: """ Proxy function for statsd.incr to impose a key structure for REST API's :param action: String with an action name eg: error, success :param func_name: The function name :param value: A float with the time it took for the endpoint to execute """ self.stats_logger.timing( f"{self.__class__.__name__}.{func_name}.{action}", value ) def send_stats_metrics( self, response: Response, key: str, time_delta: Optional[float] = None ) -> None: """ Helper function to handle sending statsd metrics :param response: flask response object, will evaluate if it was an error :param key: The function name :param time_delta: Optional time it took for the endpoint to execute """ if 200 <= response.status_code < 400: self.incr_stats("success", key) else: self.incr_stats("error", key) if time_delta: self.timing_stats("time", key, time_delta) @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.info", object_ref=False, log_to_statsd=False, ) def info_headless(self, **kwargs: Any) -> Response: """ Add statsd metrics to builtin FAB _info endpoint """ duration, response = time_function(super().info_headless, **kwargs) self.send_stats_metrics(response, self.info.__name__, duration) return response @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get", object_ref=False, log_to_statsd=False, ) def get_headless(self, pk: int, **kwargs: Any) -> Response: """ Add statsd metrics to builtin FAB GET endpoint """ duration, response = time_function(super().get_headless, pk, **kwargs) self.send_stats_metrics(response, self.get.__name__, duration) return response @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get_list", object_ref=False, log_to_statsd=False, ) def get_list_headless(self, **kwargs: Any) -> Response: """ Add statsd metrics to builtin FAB GET list endpoint """ duration, response = time_function(super().get_list_headless, **kwargs) self.send_stats_metrics(response, self.get_list.__name__, duration) return response @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.post", object_ref=False, log_to_statsd=False, ) def post_headless(self) -> 
Response: """ Add statsd metrics to builtin FAB POST endpoint """ duration, response = time_function(super().post_headless) self.send_stats_metrics(response, self.post.__name__, duration) return response @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.put", object_ref=False, log_to_statsd=False, ) def put_headless(self, pk: int) -> Response: """ Add statsd metrics to builtin FAB PUT endpoint """ duration, response = time_function(super().put_headless, pk) self.send_stats_metrics(response, self.put.__name__, duration) return response @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.delete", object_ref=False, log_to_statsd=False, ) def delete_headless(self, pk: int) -> Response: """ Add statsd metrics to builtin FAB DELETE endpoint """ duration, response = time_function(super().delete_headless, pk) self.send_stats_metrics(response, self.delete.__name__, duration) return response @expose("/related/<column_name>", methods=["GET"]) @protect() @safe @statsd_metrics @rison(get_related_schema) def related(self, column_name: str, **kwargs: Any) -> FlaskResponse: """Get related fields data --- get: parameters: - in: path schema: type: string name: column_name - in: query name: q content: application/json: schema: $ref: '#/components/schemas/get_related_schema' responses: 200: description: Related column data content: application/json: schema: schema: $ref: "#/components/schemas/RelatedResponseSchema" 400: $ref: '#/components/responses/400' 401: $ref: '#/components/responses/401' 404: $ref: '#/components/responses/404' 500: $ref: '#/components/responses/500' """ if column_name not in self.allowed_rel_fields: self.incr_stats("error", self.related.__name__) return self.response_404() args = kwargs.get("rison", {}) # handle pagination page, page_size = self._handle_page_args(args) try: datamodel = self.datamodel.get_related_interface(column_name) except KeyError: return self.response_404() page, page_size = self._sanitize_page_args(page, page_size) # handle ordering order_field = self.order_rel_fields.get(column_name) if order_field: order_column, order_direction = order_field else: order_column, order_direction = "", "" # handle filters filters = self._get_related_filter(datamodel, column_name, args.get("filter")) # Make the query _, rows = datamodel.query( filters, order_column, order_direction, page=page, page_size=page_size ) # produce response result = self._get_result_from_rows(datamodel, rows, column_name) # If ids are specified make sure we fetch and include them on the response ids = args.get("include_ids") self._add_extra_ids_to_result(datamodel, column_name, ids, result) return self.response(200, count=len(result), result=result) @expose("/distinct/<column_name>", methods=["GET"]) @protect() @safe @statsd_metrics @rison(get_related_schema) def distinct(self, column_name: str, **kwargs: Any) -> FlaskResponse: """Get distinct values from field data --- get: parameters: - in: path schema: type: string name: column_name - in: query name: q content: application/json: schema: $ref: '#/components/schemas/get_related_schema' responses: 200: description: Distinct field data content: application/json: schema: schema: $ref: "#/components/schemas/DistincResponseSchema" 400: $ref: '#/components/responses/400' 401: $ref: '#/components/responses/401' 404: $ref: '#/components/responses/404' 500: $ref: '#/components/responses/500' """ if column_name not in self.allowed_distinct_fields: 
self.incr_stats("error", self.related.__name__) return self.response_404() args = kwargs.get("rison", {}) # handle pagination page, page_size = self._sanitize_page_args(*self._handle_page_args(args)) # Create generic base filters with added request filter filters = self._get_distinct_filter(column_name, args.get("filter")) # Make the query query_count = self.appbuilder.get_session.query( func.count(distinct(getattr(self.datamodel.obj, column_name))) ) count = self.datamodel.apply_filters(query_count, filters).scalar() if count == 0: return self.response(200, count=count, result=[]) query = self.appbuilder.get_session.query( distinct(getattr(self.datamodel.obj, column_name)) ) # Apply generic base filters with added request filter query = self.datamodel.apply_filters(query, filters) # Apply sort query = self.datamodel.apply_order_by(query, column_name, "asc") # Apply pagination result = self.datamodel.apply_pagination(query, page, page_size).all() # produce response result = [ {"text": item[0], "value": item[0]} for item in result if item[0] is not None ] return self.response(200, count=count, result=result)
apache-2.0
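A minimal sketch of wiring the related-field knobs declared above into a concrete API. Assumptions not shown in this record: the module above is importable as superset.views.base_api, and the Slice model has an "owners" relationship whose rows carry first_name and username fields; the class and resource names here are illustrative, not Superset's actual implementation.

from flask_appbuilder.models.sqla.filters import FilterStartsWith
from flask_appbuilder.models.sqla.interface import SQLAInterface

from superset.models.slice import Slice
from superset.views.base_api import BaseSupersetModelRestApi, RelatedFieldFilter


class ExampleChartRestApi(BaseSupersetModelRestApi):
    datamodel = SQLAInterface(Slice)
    resource_name = "example_chart"

    # /related/owners is the only related endpoint this API exposes
    allowed_rel_fields = {"owners"}
    # Sort candidate owners by first name, ascending
    order_rel_fields = {"owners": ("first_name", "asc")}
    # Match the typed filter value with a starts-with on first_name
    related_field_filters = {
        "owners": RelatedFieldFilter("first_name", FilterStartsWith),
    }
    # Render each owner by username instead of str(model)
    text_field_rel_fields = {"owners": "username"}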
kustodian/ansible
test/units/modules/network/f5/test_bigip_smtp.py
22
4984
# -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys if sys.version_info < (2, 7): pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7") from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_smtp import ApiParameters from library.modules.bigip_smtp import ModuleParameters from library.modules.bigip_smtp import ModuleManager from library.modules.bigip_smtp import ArgumentSpec # In Ansible 2.8, Ansible changed import paths. from test.units.compat import unittest from test.units.compat.mock import Mock from test.units.modules.utils import set_module_args except ImportError: from ansible.modules.network.f5.bigip_smtp import ApiParameters from ansible.modules.network.f5.bigip_smtp import ModuleParameters from ansible.modules.network.f5.bigip_smtp import ModuleManager from ansible.modules.network.f5.bigip_smtp import ArgumentSpec # Ansible 2.8 imports from units.compat import unittest from units.compat.mock import Mock from units.modules.utils import set_module_args fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( name='foo', smtp_server='1.1.1.1', smtp_server_port='25', smtp_server_username='admin', smtp_server_password='password', local_host_name='smtp.mydomain.com', encryption='tls', update_password='always', from_address='[email protected]', authentication=True, ) p = ModuleParameters(params=args) assert p.name == 'foo' assert p.smtp_server == '1.1.1.1' assert p.smtp_server_port == 25 assert p.smtp_server_username == 'admin' assert p.smtp_server_password == 'password' assert p.local_host_name == 'smtp.mydomain.com' assert p.encryption == 'tls' assert p.update_password == 'always' assert p.from_address == '[email protected]' assert p.authentication_disabled is None assert p.authentication_enabled is True def test_api_parameters(self): p = ApiParameters(params=load_fixture('load_sys_smtp_server.json')) assert p.name == 'foo' assert p.smtp_server == 'mail.foo.bar' assert p.smtp_server_port == 465 assert p.smtp_server_username == 'admin' assert p.smtp_server_password == '$M$Ch$this-is-encrypted==' assert p.local_host_name == 'mail-host.foo.bar' assert p.encryption == 'ssl' assert p.from_address == '[email protected]' assert p.authentication_disabled is None assert p.authentication_enabled is True class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_create_monitor(self, *args): set_module_args(dict( name='foo', smtp_server='1.1.1.1', smtp_server_port='25', smtp_server_username='admin', smtp_server_password='password', local_host_name='smtp.mydomain.com', encryption='tls', update_password='always', from_address='[email protected]', authentication=True, partition='Common', provider=dict( server='localhost', password='password', user='admin' ) )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) # Override methods in the specific type of manager mm = 
ModuleManager(module=module) mm.exists = Mock(side_effect=[False, True]) mm.create_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True assert results['encryption'] == 'tls' assert results['smtp_server'] == '1.1.1.1' assert results['smtp_server_port'] == 25 assert results['local_host_name'] == 'smtp.mydomain.com' assert results['authentication'] is True assert results['from_address'] == '[email protected]' assert 'smtp_server_username' not in results assert 'smtp_server_password' not in results
gpl-3.0
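The create test above hinges on Mock(side_effect=[False, True]): when side_effect is a list, each call returns the next element, so the manager sees the resource as absent on the first exists() check and present on the second, which is exactly the create-then-verify flow. A standalone sketch of that sequencing, using the standard library's unittest.mock rather than the compat shim imported above:

from unittest.mock import Mock

exists = Mock(side_effect=[False, True])

assert exists() is False  # 1st call: resource absent, so create_on_device runs
assert exists() is True   # 2nd call: the post-create verification passes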
nzavagli/UnrealPy
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Jinja2-2.7.3/docs/jinjaext.py
17
6953
# -*- coding: utf-8 -*- """ Jinja Documentation Extensions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Support for automatically documenting filters and tests. :copyright: Copyright 2008 by Armin Ronacher. :license: BSD. """ import collections import os import re import inspect import jinja2 from itertools import islice from types import BuiltinFunctionType from docutils import nodes from docutils.statemachine import ViewList from sphinx.ext.autodoc import prepare_docstring from sphinx.application import TemplateBridge from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic from jinja2 import Environment, FileSystemLoader from jinja2.utils import next def parse_rst(state, content_offset, doc): node = nodes.section() # hack around title style bookkeeping surrounding_title_styles = state.memo.title_styles surrounding_section_level = state.memo.section_level state.memo.title_styles = [] state.memo.section_level = 0 state.nested_parse(doc, content_offset, node, match_titles=1) state.memo.title_styles = surrounding_title_styles state.memo.section_level = surrounding_section_level return node.children class JinjaStyle(Style): title = 'Jinja Style' default_style = "" styles = { Comment: 'italic #aaaaaa', Comment.Preproc: 'noitalic #B11414', Comment.Special: 'italic #505050', Keyword: 'bold #B80000', Keyword.Type: '#808080', Operator.Word: 'bold #B80000', Name.Builtin: '#333333', Name.Function: '#333333', Name.Class: 'bold #333333', Name.Namespace: 'bold #333333', Name.Entity: 'bold #363636', Name.Attribute: '#686868', Name.Tag: 'bold #686868', Name.Decorator: '#686868', String: '#AA891C', Number: '#444444', Generic.Heading: 'bold #000080', Generic.Subheading: 'bold #800080', Generic.Deleted: '#aa0000', Generic.Inserted: '#00aa00', Generic.Error: '#aa0000', Generic.Emph: 'italic', Generic.Strong: 'bold', Generic.Prompt: '#555555', Generic.Output: '#888888', Generic.Traceback: '#aa0000', Error: '#F00 bg:#FAA' } _sig_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*(\(.*?\))') def format_function(name, aliases, func): lines = inspect.getdoc(func).splitlines() signature = '()' if isinstance(func, BuiltinFunctionType): match = _sig_re.match(lines[0]) if match is not None: del lines[:1 + bool(lines and not lines[0])] signature = match.group(1) else: try: argspec = inspect.getargspec(func) if getattr(func, 'environmentfilter', False) or \ getattr(func, 'contextfilter', False) or \ getattr(func, 'evalcontextfilter', False): del argspec[0][0] signature = inspect.formatargspec(*argspec) except: pass result = ['.. 
function:: %s%s' % (name, signature), ''] result.extend(' ' + line for line in lines) if aliases: result.extend(('', ' :aliases: %s' % ', '.join( '``%s``' % x for x in sorted(aliases)))) return result def dump_functions(mapping): def directive(dirname, arguments, options, content, lineno, content_offset, block_text, state, state_machine): reverse_mapping = {} for name, func in mapping.items(): reverse_mapping.setdefault(func, []).append(name) filters = [] for func, names in reverse_mapping.items(): aliases = sorted(names, key=lambda x: len(x)) name = aliases.pop() filters.append((name, aliases, func)) filters.sort() result = ViewList() for name, aliases, func in filters: for item in format_function(name, aliases, func): result.append(item, '<jinjaext>') node = nodes.paragraph() state.nested_parse(result, content_offset, node) return node.children return directive from jinja2.defaults import DEFAULT_FILTERS, DEFAULT_TESTS jinja_filters = dump_functions(DEFAULT_FILTERS) jinja_tests = dump_functions(DEFAULT_TESTS) def jinja_nodes(dirname, arguments, options, content, lineno, content_offset, block_text, state, state_machine): from jinja2.nodes import Node doc = ViewList() def walk(node, indent): p = ' ' * indent sig = ', '.join(node.fields) doc.append(p + '.. autoclass:: %s(%s)' % (node.__name__, sig), '') if node.abstract: members = [] for key, name in node.__dict__.items(): if not key.startswith('_') and \ not hasattr(node.__base__, key) and isinstance(name, collections.Callable): members.append(key) if members: members.sort() doc.append('%s :members: %s' % (p, ', '.join(members)), '') if node.__base__ != object: doc.append('', '') doc.append('%s :Node type: :class:`%s`' % (p, node.__base__.__name__), '') doc.append('', '') children = node.__subclasses__() children.sort(key=lambda x: x.__name__.lower()) for child in children: walk(child, indent) walk(Node, 0) return parse_rst(state, content_offset, doc) def inject_toc(app, doctree, docname): titleiter = iter(doctree.traverse(nodes.title)) try: # skip first title, we are not interested in that one next(titleiter) title = next(titleiter) # and check if there is at least another title next(titleiter) except StopIteration: return tocnode = nodes.section('') tocnode['classes'].append('toc') toctitle = nodes.section('') toctitle['classes'].append('toctitle') toctitle.append(nodes.title(text='Table Of Contents')) tocnode.append(toctitle) tocnode += doctree.document.settings.env.get_toc_for(docname)[0][1] title.parent.insert(title.parent.children.index(title), tocnode) def setup(app): app.add_directive('jinjafilters', jinja_filters, 0, (0, 0, 0)) app.add_directive('jinjatests', jinja_tests, 0, (0, 0, 0)) app.add_directive('jinjanodes', jinja_nodes, 0, (0, 0, 0)) # uncomment for inline toc. links are broken unfortunately ##app.connect('doctree-resolved', inject_toc)
mit
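dump_functions above inverts the name-to-callable mapping so each filter is documented once: names that point at the same function are grouped, sorted by length, and the longest becomes the canonical name while the shorter ones are listed as aliases. A self-contained restatement of that grouping step:

def group_aliases(mapping):
    """Return (primary_name, aliases, func) triples, one per distinct func."""
    reverse = {}
    for name, func in mapping.items():
        reverse.setdefault(func, []).append(name)
    grouped = []
    for func, names in reverse.items():
        aliases = sorted(names, key=len)
        primary = aliases.pop()  # the longest name is displayed as canonical
        grouped.append((primary, aliases, func))
    return sorted(grouped, key=lambda entry: entry[0])

# 'e' and 'escape' point at the same callable, so 'escape' wins and 'e'
# becomes an alias.
print(group_aliases({"e": str.strip, "escape": str.strip, "upper": str.upper}))
# [('escape', ['e'], <method 'strip' ...>), ('upper', [], <method 'upper' ...>)]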
catherinemoresco/feedme
venv/lib/python2.7/site-packages/gunicorn/app/base.py
24
4153
# -*- coding: utf-8 - # # This file is part of gunicorn released under the MIT license. # See the NOTICE for more information. import os import sys import traceback from gunicorn import util from gunicorn.arbiter import Arbiter from gunicorn.config import Config, get_default_config_file from gunicorn import debug from gunicorn.six import execfile_ class Application(object): """\ An application interface for configuring and loading the various necessities for any given web framework. """ def __init__(self, usage=None, prog=None): self.usage = usage self.cfg = None self.callable = None self.prog = prog self.logger = None self.do_load_config() def do_load_config(self): try: self.load_config() except Exception as e: sys.stderr.write("\nError: %s\n" % str(e)) sys.stderr.flush() sys.exit(1) def load_config_from_file(self, filename): if not os.path.exists(filename): raise RuntimeError("%r doesn't exist" % filename) cfg = { "__builtins__": __builtins__, "__name__": "__config__", "__file__": filename, "__doc__": None, "__package__": None } try: execfile_(filename, cfg, cfg) except Exception: print("Failed to read config file: %s" % filename) traceback.print_exc() sys.exit(1) for k, v in cfg.items(): # Ignore unknown names if k not in self.cfg.settings: continue try: self.cfg.set(k.lower(), v) except: sys.stderr.write("Invalid value for %s: %s\n\n" % (k, v)) raise return cfg def load_config(self): # init configuration self.cfg = Config(self.usage, prog=self.prog) # parse console args parser = self.cfg.parser() args = parser.parse_args() # optional settings from apps cfg = self.init(parser, args, args.args) # Load up the any app specific configuration if cfg and cfg is not None: for k, v in cfg.items(): self.cfg.set(k.lower(), v) if args.config: self.load_config_from_file(args.config) else: default_config = get_default_config_file() if default_config is not None: self.load_config_from_file(default_config) # Lastly, update the configuration with any command line # settings. for k, v in args.__dict__.items(): if v is None: continue if k == "args": continue self.cfg.set(k.lower(), v) def init(self, parser, opts, args): raise NotImplementedError def load(self): raise NotImplementedError def reload(self): self.do_load_config() if self.cfg.spew: debug.spew() def wsgi(self): if self.callable is None: self.callable = self.load() return self.callable def run(self): if self.cfg.check_config: try: self.load() except: sys.stderr.write("\nError while loading the application:\n\n") traceback.print_exc() sys.stderr.flush() sys.exit(1) sys.exit(0) if self.cfg.spew: debug.spew() if self.cfg.daemon: util.daemonize(self.cfg.enable_stdio_inheritance) # set python paths if self.cfg.pythonpath and self.cfg.pythonpath is not None: paths = self.cfg.pythonpath.split(",") for path in paths: pythonpath = os.path.abspath(path) if pythonpath not in sys.path: sys.path.insert(0, pythonpath) try: Arbiter(self).run() except RuntimeError as e: sys.stderr.write("\nError: %s\n\n" % e) sys.stderr.flush() sys.exit(1)
gpl-2.0
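load_config above establishes a fixed precedence: values returned by the app's init() are applied first, then the config file, and finally the parsed command-line flags, with None values and the positional "args" entry skipped so unset flags never shadow file settings. A minimal standalone sketch of the same merge rule:

def merge_config(app_cfg, file_cfg, cli_args):
    """Later sources win: app defaults < config file < command line."""
    settings = {}
    for source in (app_cfg, file_cfg):
        for key, value in source.items():
            settings[key.lower()] = value
    for key, value in cli_args.items():
        if value is None or key == "args":  # unset flags must not override
            continue
        settings[key.lower()] = value
    return settings

print(merge_config({"workers": 1},
                   {"workers": 4, "bind": ":8000"},
                   {"workers": 8, "bind": None}))
# {'workers': 8, 'bind': ':8000'}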
snasoft/QtCreatorPluginsPack
Bin/3rdParty/vera/bin/lib/ast.py
255
11805
# -*- coding: utf-8 -*- """ ast ~~~ The `ast` module helps Python applications to process trees of the Python abstract syntax grammar. The abstract syntax itself might change with each Python release; this module helps to find out programmatically what the current grammar looks like and allows modifications of it. An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as a flag to the `compile()` builtin function or by using the `parse()` function from this module. The result will be a tree of objects whose classes all inherit from `ast.AST`. A modified abstract syntax tree can be compiled into a Python code object using the built-in `compile()` function. Additionally various helper functions are provided that make working with the trees simpler. The main intention of the helper functions and this module in general is to provide an easy to use interface for libraries that work tightly with the python syntax (template engines for example). :copyright: Copyright 2008 by Armin Ronacher. :license: Python License. """ from _ast import * from _ast import __version__ def parse(source, filename='<unknown>', mode='exec'): """ Parse the source into an AST node. Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). """ return compile(source, filename, mode, PyCF_ONLY_AST) def literal_eval(node_or_string): """ Safely evaluate an expression node or a string containing a Python expression. The string or node provided may only consist of the following Python literal structures: strings, numbers, tuples, lists, dicts, booleans, and None. """ _safe_names = {'None': None, 'True': True, 'False': False} if isinstance(node_or_string, basestring): node_or_string = parse(node_or_string, mode='eval') if isinstance(node_or_string, Expression): node_or_string = node_or_string.body def _convert(node): if isinstance(node, Str): return node.s elif isinstance(node, Num): return node.n elif isinstance(node, Tuple): return tuple(map(_convert, node.elts)) elif isinstance(node, List): return list(map(_convert, node.elts)) elif isinstance(node, Dict): return dict((_convert(k), _convert(v)) for k, v in zip(node.keys, node.values)) elif isinstance(node, Name): if node.id in _safe_names: return _safe_names[node.id] elif isinstance(node, BinOp) and \ isinstance(node.op, (Add, Sub)) and \ isinstance(node.right, Num) and \ isinstance(node.right.n, complex) and \ isinstance(node.left, Num) and \ isinstance(node.left.n, (int, long, float)): left = node.left.n right = node.right.n if isinstance(node.op, Add): return left + right else: return left - right raise ValueError('malformed string') return _convert(node_or_string) def dump(node, annotate_fields=True, include_attributes=False): """ Return a formatted dump of the tree in *node*. This is mainly useful for debugging purposes. The returned string will show the names and the values for fields. This makes the code impossible to evaluate, so if evaluation is wanted *annotate_fields* must be set to False. Attributes such as line numbers and column offsets are not dumped by default. If this is wanted, *include_attributes* can be set to True. 
""" def _format(node): if isinstance(node, AST): fields = [(a, _format(b)) for a, b in iter_fields(node)] rv = '%s(%s' % (node.__class__.__name__, ', '.join( ('%s=%s' % field for field in fields) if annotate_fields else (b for a, b in fields) )) if include_attributes and node._attributes: rv += fields and ', ' or ' ' rv += ', '.join('%s=%s' % (a, _format(getattr(node, a))) for a in node._attributes) return rv + ')' elif isinstance(node, list): return '[%s]' % ', '.join(_format(x) for x in node) return repr(node) if not isinstance(node, AST): raise TypeError('expected AST, got %r' % node.__class__.__name__) return _format(node) def copy_location(new_node, old_node): """ Copy source location (`lineno` and `col_offset` attributes) from *old_node* to *new_node* if possible, and return *new_node*. """ for attr in 'lineno', 'col_offset': if attr in old_node._attributes and attr in new_node._attributes \ and hasattr(old_node, attr): setattr(new_node, attr, getattr(old_node, attr)) return new_node def fix_missing_locations(node): """ When you compile a node tree with compile(), the compiler expects lineno and col_offset attributes for every node that supports them. This is rather tedious to fill in for generated nodes, so this helper adds these attributes recursively where not already set, by setting them to the values of the parent node. It works recursively starting at *node*. """ def _fix(node, lineno, col_offset): if 'lineno' in node._attributes: if not hasattr(node, 'lineno'): node.lineno = lineno else: lineno = node.lineno if 'col_offset' in node._attributes: if not hasattr(node, 'col_offset'): node.col_offset = col_offset else: col_offset = node.col_offset for child in iter_child_nodes(node): _fix(child, lineno, col_offset) _fix(node, 1, 0) return node def increment_lineno(node, n=1): """ Increment the line number of each node in the tree starting at *node* by *n*. This is useful to "move code" to a different location in a file. """ for child in walk(node): if 'lineno' in child._attributes: child.lineno = getattr(child, 'lineno', 0) + n return node def iter_fields(node): """ Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` that is present on *node*. """ for field in node._fields: try: yield field, getattr(node, field) except AttributeError: pass def iter_child_nodes(node): """ Yield all direct child nodes of *node*, that is, all fields that are nodes and all items of fields that are lists of nodes. """ for name, field in iter_fields(node): if isinstance(field, AST): yield field elif isinstance(field, list): for item in field: if isinstance(item, AST): yield item def get_docstring(node, clean=True): """ Return the docstring for the given node or None if no docstring can be found. If the node provided does not have docstrings a TypeError will be raised. """ if not isinstance(node, (FunctionDef, ClassDef, Module)): raise TypeError("%r can't have docstrings" % node.__class__.__name__) if node.body and isinstance(node.body[0], Expr) and \ isinstance(node.body[0].value, Str): if clean: import inspect return inspect.cleandoc(node.body[0].value.s) return node.body[0].value.s def walk(node): """ Recursively yield all descendant nodes in the tree starting at *node* (including *node* itself), in no specified order. This is useful if you only want to modify nodes in place and don't care about the context. 
""" from collections import deque todo = deque([node]) while todo: node = todo.popleft() todo.extend(iter_child_nodes(node)) yield node class NodeVisitor(object): """ A node visitor base class that walks the abstract syntax tree and calls a visitor function for every node found. This function may return a value which is forwarded by the `visit` method. This class is meant to be subclassed, with the subclass adding visitor methods. Per default the visitor functions for the nodes are ``'visit_'`` + class name of the node. So a `TryFinally` node visit function would be `visit_TryFinally`. This behavior can be changed by overriding the `visit` method. If no visitor function exists for a node (return value `None`) the `generic_visit` visitor is used instead. Don't use the `NodeVisitor` if you want to apply changes to nodes during traversing. For this a special visitor exists (`NodeTransformer`) that allows modifications. """ def visit(self, node): """Visit a node.""" method = 'visit_' + node.__class__.__name__ visitor = getattr(self, method, self.generic_visit) return visitor(node) def generic_visit(self, node): """Called if no explicit visitor function exists for a node.""" for field, value in iter_fields(node): if isinstance(value, list): for item in value: if isinstance(item, AST): self.visit(item) elif isinstance(value, AST): self.visit(value) class NodeTransformer(NodeVisitor): """ A :class:`NodeVisitor` subclass that walks the abstract syntax tree and allows modification of nodes. The `NodeTransformer` will walk the AST and use the return value of the visitor methods to replace or remove the old node. If the return value of the visitor method is ``None``, the node will be removed from its location, otherwise it is replaced with the return value. The return value may be the original node in which case no replacement takes place. Here is an example transformer that rewrites all occurrences of name lookups (``foo``) to ``data['foo']``:: class RewriteName(NodeTransformer): def visit_Name(self, node): return copy_location(Subscript( value=Name(id='data', ctx=Load()), slice=Index(value=Str(s=node.id)), ctx=node.ctx ), node) Keep in mind that if the node you're operating on has child nodes you must either transform the child nodes yourself or call the :meth:`generic_visit` method for the node first. For nodes that were part of a collection of statements (that applies to all statement nodes), the visitor may also return a list of nodes rather than just a single node. Usually you use the transformer like this:: node = YourTransformer().visit(node) """ def generic_visit(self, node): for field, old_value in iter_fields(node): old_value = getattr(node, field, None) if isinstance(old_value, list): new_values = [] for value in old_value: if isinstance(value, AST): value = self.visit(value) if value is None: continue elif not isinstance(value, AST): new_values.extend(value) continue new_values.append(value) old_value[:] = new_values elif isinstance(old_value, AST): new_node = self.visit(old_value) if new_node is None: delattr(node, field) else: setattr(node, field, new_node) return node
lgpl-3.0
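The NodeVisitor contract documented above dispatches each node to visit_<ClassName> and falls back to generic_visit, which recurses into children. A short runnable example that counts every variable reference in a snippet; the module shown is the Python 2 stdlib copy, but this usage is identical with the Python 3 ast module:

import ast

class NameCounter(ast.NodeVisitor):
    def __init__(self):
        self.counts = {}

    def visit_Name(self, node):  # dispatched for every ast.Name node
        self.counts[node.id] = self.counts.get(node.id, 0) + 1
        self.generic_visit(node)  # keep walking into any children

tree = ast.parse("x = y + y * z")
counter = NameCounter()
counter.visit(tree)
print(counter.counts)  # {'x': 1, 'y': 2, 'z': 1}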
mezz64/home-assistant
homeassistant/components/xiaomi/device_tracker.py
12
5680
"""Support for Xiaomi Mi routers.""" import logging import requests import voluptuous as vol from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, HTTP_OK import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_USERNAME, default="admin"): cv.string, vol.Required(CONF_PASSWORD): cv.string, } ) def get_scanner(hass, config): """Validate the configuration and return a Xiaomi Device Scanner.""" scanner = XiaomiDeviceScanner(config[DOMAIN]) return scanner if scanner.success_init else None class XiaomiDeviceScanner(DeviceScanner): """This class queries a Xiaomi Mi router. Adapted from Luci scanner. """ def __init__(self, config): """Initialize the scanner.""" self.host = config[CONF_HOST] self.username = config[CONF_USERNAME] self.password = config[CONF_PASSWORD] self.last_results = {} self.token = _get_token(self.host, self.username, self.password) self.mac2name = None self.success_init = self.token is not None def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self._update_info() return self.last_results def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" if self.mac2name is None: result = self._retrieve_list_with_retry() if result: hosts = [x for x in result if "mac" in x and "name" in x] mac2name_list = [(x["mac"].upper(), x["name"]) for x in hosts] self.mac2name = dict(mac2name_list) else: # Error, handled in the _retrieve_list_with_retry return return self.mac2name.get(device.upper(), None) def _update_info(self): """Ensure the information from the router are up to date. Returns true if scanning successful. """ if not self.success_init: return False result = self._retrieve_list_with_retry() if result: self._store_result(result) return True return False def _retrieve_list_with_retry(self): """Retrieve the device list with a retry if token is invalid. Return the list if successful. """ _LOGGER.info("Refreshing device list") result = _retrieve_list(self.host, self.token) if result: return result _LOGGER.info("Refreshing token and retrying device list refresh") self.token = _get_token(self.host, self.username, self.password) return _retrieve_list(self.host, self.token) def _store_result(self, result): """Extract and store the device list in self.last_results.""" self.last_results = [] for device_entry in result: # Check if the device is marked as connected if int(device_entry["online"]) == 1: self.last_results.append(device_entry["mac"]) def _retrieve_list(host, token, **kwargs): """Get device list for the given host.""" url = "http://{}/cgi-bin/luci/;stok={}/api/misystem/devicelist" url = url.format(host, token) try: res = requests.get(url, timeout=5, **kwargs) except requests.exceptions.Timeout: _LOGGER.exception("Connection to the router timed out at URL %s", url) return if res.status_code != HTTP_OK: _LOGGER.exception("Connection failed with http code %s", res.status_code) return try: result = res.json() except ValueError: # If json decoder could not parse the response _LOGGER.exception("Failed to parse response from mi router") return try: xiaomi_code = result["code"] except KeyError: _LOGGER.exception("No field code in response from mi router. 
%s", result) return if xiaomi_code == 0: try: return result["list"] except KeyError: _LOGGER.exception("No list in response from mi router. %s", result) return else: _LOGGER.info( "Receive wrong Xiaomi code %s, expected 0 in response %s", xiaomi_code, result, ) return def _get_token(host, username, password): """Get authentication token for the given host+username+password.""" url = f"http://{host}/cgi-bin/luci/api/xqsystem/login" data = {"username": username, "password": password} try: res = requests.post(url, data=data, timeout=5) except requests.exceptions.Timeout: _LOGGER.exception("Connection to the router timed out") return if res.status_code == HTTP_OK: try: result = res.json() except ValueError: # If JSON decoder could not parse the response _LOGGER.exception("Failed to parse response from mi router") return try: return result["token"] except KeyError: error_message = ( "Xiaomi token cannot be refreshed, response from " + "url: [%s] \nwith parameter: [%s] \nwas: [%s]" ) _LOGGER.exception(error_message, url, data, result) return else: _LOGGER.error( "Invalid response: [%s] at url: [%s] with data [%s]", res, url, data )
apache-2.0
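_retrieve_list_with_retry above encodes a common pattern for token-authenticated APIs: try the request with the cached token, and if it fails (the token has likely expired), fetch a fresh token and retry exactly once. A standalone sketch of the idiom; fetch and get_token are hypothetical stand-ins for _retrieve_list and _get_token:

def call_with_token_refresh(fetch, get_token, token):
    """Try fetch(token); on a falsy result refresh the token and retry once."""
    result = fetch(token)
    if result:
        return result, token
    token = get_token()           # token likely expired: refresh it
    return fetch(token), token    # a single retry, mirroring the code above

# Example: the cached token is stale, the refreshed one works.
devices = {"fresh": [{"mac": "AA:BB", "online": "1"}]}
result, token = call_with_token_refresh(
    lambda t: devices.get(t), lambda: "fresh", "stale"
)
print(result, token)  # [{'mac': 'AA:BB', 'online': '1'}] fresh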
xiaolonginfo/decode-Django
Django-1.5.1/tests/regressiontests/expressions_regress/tests.py
46
15966
""" Spanning tests for all the operations that F() expressions can perform. """ from __future__ import absolute_import import datetime from django.db import connection from django.db.models import F from django.test import TestCase, Approximate, skipUnlessDBFeature from .models import Number, Experiment class ExpressionsRegressTests(TestCase): def setUp(self): Number(integer=-1).save() Number(integer=42).save() Number(integer=1337).save() self.assertEqual(Number.objects.update(float=F('integer')), 3) def test_fill_with_value_from_same_object(self): """ We can fill a value in all objects with an other value of the same object. """ self.assertQuerysetEqual( Number.objects.all(), [ '<Number: -1, -1.000>', '<Number: 42, 42.000>', '<Number: 1337, 1337.000>' ] ) def test_increment_value(self): """ We can increment a value of all objects in a query set. """ self.assertEqual( Number.objects.filter(integer__gt=0) .update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.all(), [ '<Number: -1, -1.000>', '<Number: 43, 42.000>', '<Number: 1338, 1337.000>' ] ) def test_filter_not_equals_other_field(self): """ We can filter for objects, where a value is not equals the value of an other field. """ self.assertEqual( Number.objects.filter(integer__gt=0) .update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.exclude(float=F('integer')), [ '<Number: 43, 42.000>', '<Number: 1338, 1337.000>' ] ) def test_complex_expressions(self): """ Complex expressions of different connection types are possible. """ n = Number.objects.create(integer=10, float=123.45) self.assertEqual(Number.objects.filter(pk=n.pk) .update(float=F('integer') + F('float') * 2), 1) self.assertEqual(Number.objects.get(pk=n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3)) class ExpressionOperatorTests(TestCase): def setUp(self): self.n = Number.objects.create(integer=42, float=15.5) def test_lefthand_addition(self): # LH Addition of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F('integer') + 15, float=F('float') + 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_lefthand_subtraction(self): # LH Subtraction of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3)) def test_lefthand_multiplication(self): # Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_lefthand_division(self): # LH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3)) def test_lefthand_modulo(self): # LH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) def 
test_lefthand_bitwise_and(self): # LH Bitwise ands on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) @skipUnlessDBFeature('supports_bitwise_or') def test_lefthand_bitwise_or(self): # LH Bitwise or on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) def test_right_hand_addition(self): # Right hand operators Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float')) # RH Addition of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_right_hand_subtraction(self): Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float')) # RH Subtraction of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3)) def test_right_hand_multiplication(self): # RH Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_right_hand_division(self): # RH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3)) def test_right_hand_modulo(self): # RH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) class FTimeDeltaTests(TestCase): def setUp(self): sday = datetime.date(2010, 6, 25) stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) midnight = datetime.time(0) delta0 = datetime.timedelta(0) delta1 = datetime.timedelta(microseconds=253000) delta2 = datetime.timedelta(seconds=44) delta3 = datetime.timedelta(hours=21, minutes=8) delta4 = datetime.timedelta(days=10) # Test data is set so that deltas and delays will be # strictly increasing. self.deltas = [] self.delays = [] self.days_long = [] # e0: started same day as assigned, zero duration end = stime+delta0 e0 = Experiment.objects.create(name='e0', assigned=sday, start=stime, end=end, completed=end.date()) self.deltas.append(delta0) self.delays.append(e0.start- datetime.datetime.combine(e0.assigned, midnight)) self.days_long.append(e0.completed-e0.assigned) # e1: started one day after assigned, tiny duration, data # set so that end time has no fractional seconds, which # tests an edge case on sqlite. This Experiment is only # included in the test data when the DB supports microsecond # precision. 
if connection.features.supports_microsecond_precision: delay = datetime.timedelta(1) end = stime + delay + delta1 e1 = Experiment.objects.create(name='e1', assigned=sday, start=stime+delay, end=end, completed=end.date()) self.deltas.append(delta1) self.delays.append(e1.start- datetime.datetime.combine(e1.assigned, midnight)) self.days_long.append(e1.completed-e1.assigned) # e2: started three days after assigned, small duration end = stime+delta2 e2 = Experiment.objects.create(name='e2', assigned=sday-datetime.timedelta(3), start=stime, end=end, completed=end.date()) self.deltas.append(delta2) self.delays.append(e2.start- datetime.datetime.combine(e2.assigned, midnight)) self.days_long.append(e2.completed-e2.assigned) # e3: started four days after assigned, medium duration delay = datetime.timedelta(4) end = stime + delay + delta3 e3 = Experiment.objects.create(name='e3', assigned=sday, start=stime+delay, end=end, completed=end.date()) self.deltas.append(delta3) self.delays.append(e3.start- datetime.datetime.combine(e3.assigned, midnight)) self.days_long.append(e3.completed-e3.assigned) # e4: started 10 days after assignment, long duration end = stime + delta4 e4 = Experiment.objects.create(name='e4', assigned=sday-datetime.timedelta(10), start=stime, end=end, completed=end.date()) self.deltas.append(delta4) self.delays.append(e4.start- datetime.datetime.combine(e4.assigned, midnight)) self.days_long.append(e4.completed-e4.assigned) self.expnames = [e.name for e in Experiment.objects.all()] def test_delta_add(self): for i in range(len(self.deltas)): delta = self.deltas[i] test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start')+delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start')+delta)] self.assertEqual(test_set, self.expnames[:i+1]) def test_delta_subtract(self): for i in range(len(self.deltas)): delta = self.deltas[i] test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end')-delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end')-delta)] self.assertEqual(test_set, self.expnames[:i+1]) def test_exclude(self): for i in range(len(self.deltas)): delta = self.deltas[i] test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start')+delta)] self.assertEqual(test_set, self.expnames[i:]) test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start')+delta)] self.assertEqual(test_set, self.expnames[i+1:]) def test_date_comparison(self): for i in range(len(self.days_long)): days = self.days_long[i] test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned')+days)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned')+days)] self.assertEqual(test_set, self.expnames[:i+1]) @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons") def test_mixed_comparisons1(self): for i in range(len(self.delays)): delay = self.delays[i] if not connection.features.supports_microsecond_precision: delay = datetime.timedelta(delay.days, delay.seconds) test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start')-delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start')-delay)] self.assertEqual(test_set, self.expnames[:i+1]) def test_mixed_comparisons2(self): delays = [datetime.timedelta(delay.days) for delay in self.delays] for i in 
range(len(delays)): delay = delays[i] test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned')+delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(start__lte=F('assigned')+delay+ datetime.timedelta(1))] self.assertEqual(test_set, self.expnames[:i+1]) def test_delta_update(self): for i in range(len(self.deltas)): delta = self.deltas[i] exps = Experiment.objects.all() expected_durations = [e.duration() for e in exps] expected_starts = [e.start+delta for e in exps] expected_ends = [e.end+delta for e in exps] Experiment.objects.update(start=F('start')+delta, end=F('end')+delta) exps = Experiment.objects.all() new_starts = [e.start for e in exps] new_ends = [e.end for e in exps] new_durations = [e.duration() for e in exps] self.assertEqual(expected_starts, new_starts) self.assertEqual(expected_ends, new_ends) self.assertEqual(expected_durations, new_durations) def test_delta_invalid_op_mult(self): raised = False try: r = repr(Experiment.objects.filter(end__lt=F('start')*self.deltas[0])) except TypeError: raised = True self.assertTrue(raised, "TypeError not raised on attempt to multiply datetime by timedelta.") def test_delta_invalid_op_div(self): raised = False try: r = repr(Experiment.objects.filter(end__lt=F('start')/self.deltas[0])) except TypeError: raised = True self.assertTrue(raised, "TypeError not raised on attempt to divide datetime by timedelta.") def test_delta_invalid_op_mod(self): raised = False try: r = repr(Experiment.objects.filter(end__lt=F('start')%self.deltas[0])) except TypeError: raised = True self.assertTrue(raised, "TypeError not raised on attempt to modulo divide datetime by timedelta.") def test_delta_invalid_op_and(self): raised = False try: r = repr(Experiment.objects.filter(end__lt=F('start').bitand(self.deltas[0]))) except TypeError: raised = True self.assertTrue(raised, "TypeError not raised on attempt to binary and a datetime with a timedelta.") def test_delta_invalid_op_or(self): raised = False try: r = repr(Experiment.objects.filter(end__lt=F('start').bitor(self.deltas[0]))) except TypeError: raised = True self.assertTrue(raised, "TypeError not raised on attempt to binary or a datetime with a timedelta.")
gpl-2.0
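The suites above exercise Django's F() objects, which let a query reference one of the row's own columns so arithmetic and comparisons run inside the database rather than in Python. A brief sketch of the core idioms, assuming a configured Django project with the Number model these tests import:

from django.db.models import F

# One UPDATE statement increments every positive integer in place; no row
# is loaded into Python, so a concurrent writer cannot race a stale value.
Number.objects.filter(integer__gt=0).update(integer=F("integer") + 1)

# Column-to-column comparison: rows where float does not equal integer.
mismatched = Number.objects.exclude(float=F("integer"))

# Combined expressions are allowed, as in test_complex_expressions above.
Number.objects.filter(pk=1).update(float=F("integer") + F("float") * 2)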