Merge with 357990a5f8cc85fd4181b2d8d0556b6c05fcf287
author Yves-Marie Haussonne <1218002+ymph@users.noreply.github.com>
Fri, 10 May 2013 13:34:40 +0200
changeset 893 10a19dd4e1c9
parent 877 41ce1c341abe (current diff)
parent 891 8628c590f608 (diff)
child 894 cba4554e9c03
Merge with 357990a5f8cc85fd4181b2d8d0556b6c05fcf287
script/lib/tweetstream/CHANGELOG
script/lib/tweetstream/LICENSE
script/lib/tweetstream/README
script/lib/tweetstream/conftest.py
script/lib/tweetstream/servercontext.py
script/lib/tweetstream/setup.py
script/lib/tweetstream/tests/test_tweetstream.py
script/lib/tweetstream/tox.ini
script/lib/tweetstream/tweetstream/__init__.py
script/lib/tweetstream/tweetstream/deprecated.py
script/lib/tweetstream/tweetstream/streamclasses.py
script/rest/enmi_profile.py
script/stream/recorder_stream.py
script/stream/recorder_tweetstream.py
script/virtualenv/res/credential.txt
script/virtualenv/res/src/SQLAlchemy-0.8.0b1.tar.gz
script/virtualenv/res/src/distribute-0.6.32.tar.gz
script/virtualenv/res/src/httplib2-0.7.7.tar.gz
script/virtualenv/res/src/lxml-3.0.1.tar.gz
script/virtualenv/res/src/lxml-3.1.2.tar.gz
script/virtualenv/res/src/psycopg2-2.4.5.tar.gz
script/virtualenv/res/src/pytz-2012h.tar.bz2
script/virtualenv/res/src/requests-1.1.0.tar.gz
script/virtualenv/res/src/simplejson-2.6.2.tar.gz
script/virtualenv/res/src/tweepy-1.12.tar.gz
script/virtualenv/res/src/twitter-1.9.0.tar.gz
script/virtualenv/script/virtualenv.py
script/virtualenv/script/virtualenv_support/distribute-0.6.31.tar.gz
script/virtualenv/script/virtualenv_support/pip-1.2.1.tar.gz
--- a/.pydevproject	Tue May 07 18:28:26 2013 +0200
+++ b/.pydevproject	Fri May 10 13:34:40 2013 +0200
@@ -1,9 +1,11 @@
 <?xml version="1.0" encoding="UTF-8" standalone="no"?>
 <?eclipse-pydev version="1.0"?><pydev_project>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">python_tl</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
 <pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
-<path>/tweet_live/script/lib</path>
-<path>/tweet_live/script/lib/tweetstream</path>
+<path>/tweet_live/script/lib/iri_tweet</path>
+<path>/tweet_live/script/stream</path>
+<path>/tweet_live/script/rest</path>
+<path>/tweet_live/script/utils</path>
 </pydev_pathproperty>
 </pydev_project>
--- a/sbin/sync/sync_live	Tue May 07 18:28:26 2013 +0200
+++ b/sbin/sync/sync_live	Fri May 10 13:34:40 2013 +0200
@@ -9,7 +9,8 @@
 #text2unix ~/tmp/tweet_live_V$1
 
 if [ -d ~/tmp/tweet_live_V$1 ]; then
-    cat <<EOT | rsync -Cvrlz --delete --filter=". -" ~/tmp/tweet_live_V$1/web/ iri@www.iri.centrepompidou.fr:/home/polemictweet/
+#    cat <<EOT | rsync -Cvrlz --delete --filter=". -" ~/tmp/tweet_live_V$1/web/ iri@www.iri.centrepompidou.fr:/home/polemictweet/
+    cat <<EOT | rsync -Cvrlz --delete --filter=". -" ~/tmp/tweet_live_V$1/web/ iri@ftv.iri-research.org:/srv/www/pt/
 + core
 P config.php
 P .htaccess
@@ -23,4 +24,4 @@
     rm -fr ~/tmp/tweet_live_V$1;
 fi
 
-ssh iri@www.iri.centrepompidou.fr sudo apache2ctl restart
+#ssh iri@www.iri.centrepompidou.fr sudo apache2ctl restart
--- a/script/lib/iri_tweet/iri_tweet/__init__.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/lib/iri_tweet/iri_tweet/__init__.py	Fri May 10 13:34:40 2013 +0200
@@ -21,3 +21,34 @@
 __contact__ = "ymh.work@gmail.com"
 __homepage__ = ""
 __docformat__ = "restructuredtext"
+
+
+"""
+ .. data:: USER_AGENT
+
+     The default user agent string for stream objects
+"""
+
+USER_AGENT = "IRITweet %s" % __version__
+
+
+class IRITweetError(Exception):
+    """Base class for all IRITweet errors"""
+    pass
+
+
+class AuthenticationError(IRITweetError):
+    """Exception raised if the username/password is not accepted"""
+    pass
+
+
+class ConnectionError(IRITweetError):
+    """Raised when there are network problems. This means when there are
+    dns errors, network errors, twitter issues"""
+
+    def __init__(self, reason, details=None):
+        self.reason = reason
+        self.details = details
+
+    def __str__(self):
+        return '<ConnectionError %s>' % self.reason
--- a/script/lib/iri_tweet/iri_tweet/models.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/lib/iri_tweet/iri_tweet/models.py	Fri May 10 13:34:40 2013 +0200
@@ -1,5 +1,6 @@
 from sqlalchemy import (Boolean, Column, Enum, BigInteger, Integer, String, 
-    ForeignKey, DateTime, create_engine)
+    ForeignKey, DateTime, create_engine, event)
+from sqlalchemy.engine import Engine
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import relationship, sessionmaker
 import anyjson
@@ -11,12 +12,10 @@
 Base = declarative_base()
 
 APPLICATION_NAME = "IRI_TWITTER" 
-CONSUMER_KEY = "54ThDZhpEjokcMgHJOMnQA"
-CONSUMER_SECRET = "wUoL9UL2T87tfc97R0Dff2EaqRzpJ5XGdmaN2XK3udA"
+#CONSUMER_KEY = "54ThDZhpEjokcMgHJOMnQA"
+#CONSUMER_SECRET = "wUoL9UL2T87tfc97R0Dff2EaqRzpJ5XGdmaN2XK3udA"
 ACCESS_TOKEN_KEY = None
 ACCESS_TOKEN_SECRET = None
-#ACCESS_TOKEN_KEY= "47312923-LiNTtz0I18YXMVIrFeTuhmH7bOvYsK6p3Ln2Dc"
-#ACCESS_TOKEN_SECRET = "r3LoXVcjImNAElUpWqTu2SG2xCdWFHkva7xeQoncA"
 
 def adapt_date(date_str):
     ts = email.utils.parsedate_tz(date_str) #@UndefinedVariable
@@ -32,7 +31,7 @@
             
     def __init__(cls, name, bases, ns): #@NoSelf
         def init(self, **kwargs):
-            for key, value in kwargs.items():
+            for key, value in kwargs.iteritems():
                 if hasattr(self, key):
                     setattr(self, key, value)
             super(cls, self).__init__()
@@ -82,6 +81,14 @@
         'OK' : 1,
         'ERROR' : 2,
         'NOT_TWEET': 3,
+        'DELETE': 4,
+        'SCRUB_GEO': 5,
+        'LIMIT': 6,
+        'STATUS_WITHHELD': 7,
+        'USER_WITHHELD': 8,
+        'DISCONNECT': 9,
+        'STALL_WARNING': 10,
+        'DELETE_PENDING': 4  # same numeric code as DELETE; pending deletes are distinguished by their status_id
     }
     __metaclass__ = TweetMeta
     
@@ -90,6 +97,7 @@
     ts = Column(DateTime, default=datetime.datetime.utcnow, index=True)
     tweet_source_id = Column(Integer, ForeignKey('tweet_tweet_source.id'))
     tweet_source = relationship("TweetSource", backref="logs")
+    status_id = Column(BigInteger, index=True, nullable=True, default=None)
     status = Column(Integer)
     error = Column(String)
     error_stack = Column(String)
@@ -121,7 +129,7 @@
     user = relationship("User", backref="tweets")
     tweet_source_id = Column(Integer, ForeignKey('tweet_tweet_source.id'))
     tweet_source = relationship("TweetSource", backref="tweet")
-    entity_list = relationship(Entity, backref='tweet')
+    entity_list = relationship(Entity, backref='tweet', cascade="all, delete-orphan")
     received_at = Column(DateTime, default=datetime.datetime.utcnow, index=True)
         
 
@@ -266,9 +274,17 @@
     
     session_argname = [ 'autoflush','binds', "class_", "_enable_transaction_accounting","expire_on_commit", "extension", "query_cls", "twophase", "weak_identity_map", "autocommit"]
     
-    kwargs_ce = dict((k, v) for k,v in kwargs.items() if (k not in session_argname and k != "create_all"))
+    kwargs_ce = dict((k, v) for k,v in kwargs.iteritems() if (k not in session_argname and k != "create_all"))
 
     engine = create_engine(*args, **kwargs_ce)
+    
+    if engine.name == "sqlite":
+        @event.listens_for(Engine, "connect")
+        def set_sqlite_pragma(dbapi_connection, connection_record):
+            cursor = dbapi_connection.cursor()
+            cursor.execute("PRAGMA foreign_keys=ON")
+            cursor.close()
+    
     metadata = Base.metadata        
                 
     kwargs_sm = {'bind': engine}
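SQLite does not enforce foreign keys unless PRAGMA foreign_keys=ON is issued on each connection, which is what the new event listener above does. Note that listening on the Engine class registers the hook for every engine created afterwards, not only the SQLite one; listening on the specific engine instance would scope it. A quick sanity check of the pragma, mirroring the listener above against an in-memory database (a sketch, not part of the commit):

    from sqlalchemy import create_engine, event
    from sqlalchemy.engine import Engine

    @event.listens_for(Engine, "connect")
    def set_sqlite_pragma(dbapi_connection, connection_record):
        # runs for every new DBAPI connection
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.close()

    engine = create_engine("sqlite:///:memory:")
    assert engine.execute("PRAGMA foreign_keys").scalar() == 1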
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/script/lib/iri_tweet/iri_tweet/processor.py	Fri May 10 13:34:40 2013 +0200
@@ -0,0 +1,539 @@
+# -*- coding: utf-8 -*-
+'''
+Created on Apr 29, 2013
+
+@author: ymh
+'''
+from iri_tweet.models import (User, EntityType, adapt_json, MediaType, Media, 
+    EntityMedia, Hashtag, EntityHashtag, EntityUser, EntityUrl, Url, Entity, Tweet, 
+    TweetSource, TweetLog)
+from iri_tweet.utils import (ObjectsBuffer, adapt_fields, fields_adapter, 
+    ObjectBufferProxy, get_oauth_token, clean_keys)
+from sqlalchemy.orm import joinedload
+import anyjson
+import logging
+import twitter
+import twitter_text
+
+
+class TwitterProcessorException(Exception):
+    pass
+
+class TwitterProcessor(object):
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+
+        if json_dict is None and json_txt is None:
+            raise TwitterProcessorException("No json")
+        
+        if json_dict is None:
+            self.json_dict = anyjson.deserialize(json_txt)
+        else:
+            self.json_dict = json_dict
+        
+        if not json_txt:
+            self.json_txt = anyjson.serialize(json_dict)
+        else:
+            self.json_txt = json_txt
+        
+        if "id" not in self.json_dict:
+            raise TwitterProcessorException("No id in json")
+        
+        self.source_id = source_id
+        self.session = session
+        self.consumer_key = consumer_token[0]
+        self.consumer_secret = consumer_token[1]
+        self.token_filename = token_filename
+        self.access_token = access_token
+        self.obj_buffer = ObjectsBuffer()
+        self.user_query_twitter = user_query_twitter
+        if not logger:
+            self.logger = logging.getLogger(__name__)
+        else:
+            self.logger = logger
+
+    def process(self):
+        if self.source_id is None:
+            tweet_source = self.obj_buffer.add_object(TweetSource, None, {'original_json':self.json_txt}, True)
+            self.source_id = tweet_source.id
+        self.process_source()
+        self.obj_buffer.persists(self.session)
+        
+    def process_source(self):
+        raise NotImplementedError()
+    
+    def log_info(self):
+        return "Process tweet %s" %  repr(self.__class__)    
+
+
+class TwitterProcessorStatus(TwitterProcessor):
+    
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+        
+    def __get_user(self, user_dict, do_merge):
+        self.logger.debug("Get user : " + repr(user_dict)) #@UndefinedVariable
+        
+        user_dict = adapt_fields(user_dict, fields_adapter["stream"]["user"])
+    
+        user_id = user_dict.get("id",None)    
+        user_name = user_dict.get("screen_name", user_dict.get("name", None))
+        
+        if user_id is None and user_name is None:
+            return None
+
+        user = None
+        if user_id:
+            user = self.obj_buffer.get(User, id=user_id)
+        else:
+            user = self.obj_buffer.get(User, screen_name=user_name)
+
+        #todo: update the user if needed
+        if user is not None:
+            user_created_at = None
+            if user.args is not None:
+                user_created_at = user.args.get('created_at', None)
+            if user_created_at is None and user_dict.get('created_at', None) is not None and do_merge:
+                if user.args is None:
+                    user.args = user_dict
+                else:
+                    user.args.update(user_dict)
+            return user
+
+        #todo : add methods to objectbuffer to get buffered user
+        user_obj = None
+        if user_id:
+            user_obj = self.session.query(User).filter(User.id == user_id).first()
+        else:
+            user_obj = self.session.query(User).filter(User.screen_name.ilike(user_name)).first()
+    
+        #todo update user if needed
+        if user_obj is not None:            
+            if user_obj.created_at is not None or user_dict.get('created_at', None) is None or not do_merge :
+                user = ObjectBufferProxy(User, None, None, False, user_obj)
+            else:
+                user = self.obj_buffer.add_object(User, None, user_dict, True, user_obj)
+            return user
+    
+        user_created_at = user_dict.get("created_at", None)
+        
+        if user_created_at is None and self.user_query_twitter:
+            
+            if self.access_token is not None:
+                access_token_key, access_token_secret = self.access_token
+            else:
+                access_token_key, access_token_secret = get_oauth_token(consumer_key=self.consumer_key, consumer_secret=self.consumer_secret, token_file_path=self.token_filename)
+            #TODO pass it as argument
+            t = twitter.Twitter(auth=twitter.OAuth(access_token_key, access_token_secret, self.consumer_key, self.consumer_secret))
+            try:
+                if user_id:
+                    user_dict = t.users.show(user_id=user_id)
+                else:
+                    user_dict = t.users.show(screen_name=user_name)
+            except Exception as e:
+                self.logger.info("get_user : TWITTER ERROR : " + repr(e)) #@UndefinedVariable
+                self.logger.info("get_user : TWITTER ERROR : " + str(e)) #@UndefinedVariable
+                return None
+            
+        if "id" not in user_dict:
+            return None
+        
+        #TODO filter get, wrap in proxy
+        user_obj = self.session.query(User).filter(User.id == user_dict["id"]).first()
+        
+        if user_obj is not None and not do_merge:
+            return ObjectBufferProxy(User, None, None, False, user_obj)
+        else:        
+            return self.obj_buffer.add_object(User, None, user_dict, True)        
+
+    def __get_or_create_object(self, klass, filter_by_kwargs, filter_arg, creation_kwargs, must_flush, do_merge):
+        
+        obj_proxy = self.obj_buffer.get(klass, **filter_by_kwargs)
+        if obj_proxy is None:
+            query = self.session.query(klass)
+            if filter_arg is not None:
+                query = query.filter(filter_arg)
+            else:
+                query = query.filter_by(**filter_by_kwargs)
+            obj_instance = query.first()
+            if obj_instance is not None:
+                if not do_merge:
+                    obj_proxy = ObjectBufferProxy(klass, None, None, False, obj_instance)
+                else:
+                    obj_proxy = self.obj_buffer.add_object(klass, None, creation_kwargs, must_flush, obj_instance)
+        if obj_proxy is None:
+            obj_proxy = self.obj_buffer.add_object(klass, None, creation_kwargs, must_flush)
+        return obj_proxy
+
+
+    def __process_entity(self, ind, ind_type):
+        self.logger.debug("Process_entity : " + repr(ind) + " : " + repr(ind_type)) #@UndefinedVariable
+        
+        ind = clean_keys(ind)
+        
+        entity_type = self.__get_or_create_object(EntityType, {'label':ind_type}, None, {'label':ind_type}, True, False)
+        
+        entity_dict = {
+           "indice_start"   : ind["indices"][0],
+           "indice_end"     : ind["indices"][1],
+           "tweet_id"       : self.tweet.id,
+           "entity_type_id" : entity_type.id,
+           "source"         : adapt_json(ind)
+        }
+
+        def process_medias():
+            
+            media_id = ind.get('id', None)
+            if media_id is None:
+                return None, None
+            
+            type_str = ind.get("type", "photo")
+            media_type = self.__get_or_create_object(MediaType, {'label': type_str}, None, {'label':type_str}, True, False)
+            media_ind = adapt_fields(ind, fields_adapter["entities"]["medias"])
+            if "type" in media_ind:
+                del(media_ind["type"])
+            media_ind['type_id'] = media_type.id            
+            media = self.__get_or_create_object(Media, {'id':media_id}, None, media_ind, True, False)
+            
+            entity_dict['media_id'] = media.id
+            return EntityMedia, entity_dict
+
+        def process_hashtags():
+            text = ind.get("text", ind.get("hashtag", None))
+            if text is None:
+                return None, None
+            ind['text'] = text
+            hashtag = self.__get_or_create_object(Hashtag, {'text':text}, Hashtag.text.ilike(text), ind, True, False)
+            entity_dict['hashtag_id'] = hashtag.id
+            return EntityHashtag, entity_dict             
+        
+        def process_user_mentions():
+            user_mention = self.__get_user(ind, False)
+            if user_mention is None:
+                entity_dict['user_id'] = None
+            else:
+                entity_dict['user_id'] = user_mention.id
+            return EntityUser, entity_dict
+        
+        def process_urls():
+            url = self.__get_or_create_object(Url, {'url':ind["url"]}, None, ind, True, False)
+            entity_dict['url_id'] = url.id
+            return EntityUrl, entity_dict
+                
+        #{'': lambda }
+        entity_klass, entity_dict =  { 
+            'hashtags': process_hashtags,
+            'user_mentions' : process_user_mentions,
+            'urls' : process_urls,
+            'media': process_medias,
+            }.get(ind_type, lambda: (Entity, entity_dict))()
+            
+        self.logger.debug("Process_entity entity_dict: " + repr(entity_dict)) #@UndefinedVariable
+        if entity_klass:
+            self.obj_buffer.add_object(entity_klass, None, entity_dict, False)
+
+
+    def __process_twitter_stream(self):
+        
+        tweet_nb = self.session.query(Tweet).filter(Tweet.id == self.json_dict["id"]).count()
+        if tweet_nb > 0:
+            return
+        
+        ts_copy = adapt_fields(self.json_dict, fields_adapter["stream"]["tweet"])
+        
+        # get or create user
+        user = self.__get_user(self.json_dict["user"], True)
+        if user is None:
+            self.logger.warning("USER not found " + repr(self.json_dict["user"])) #@UndefinedVariable
+            ts_copy["user_id"] = None
+        else:
+            ts_copy["user_id"] = user.id
+            
+        del(ts_copy['user'])
+        ts_copy["tweet_source_id"] = self.source_id
+        
+        self.tweet = self.obj_buffer.add_object(Tweet, None, ts_copy, True)
+            
+        self.__process_entities()
+
+
+    def __process_entities(self):
+        if "entities" in self.json_dict:
+            for ind_type, entity_list in self.json_dict["entities"].iteritems():
+                for ind in entity_list:
+                    self.__process_entity(ind, ind_type)
+        else:
+            
+            text = self.tweet.text
+            extractor = twitter_text.Extractor(text)
+            for ind in extractor.extract_hashtags_with_indices():
+                self.__process_entity(ind, "hashtags")
+            
+            for ind in extractor.extract_urls_with_indices():
+                self.__process_entity(ind, "urls")
+            
+            for ind in extractor.extract_mentioned_screen_names_with_indices():
+                self.__process_entity(ind, "user_mentions")
+
+    def __process_twitter_rest(self):
+        tweet_nb = self.session.query(Tweet).filter(Tweet.id == self.json_dict["id"]).count()
+        if tweet_nb > 0:
+            return
+        
+        
+        tweet_fields = {
+            'created_at': self.json_dict["created_at"], 
+            'favorited': False,
+            'id': self.json_dict["id"],
+            'id_str': self.json_dict["id_str"],
+            #'in_reply_to_screen_name': ts["to_user"], 
+            'in_reply_to_user_id': self.json_dict.get("in_reply_to_user_id",None),
+            'in_reply_to_user_id_str': self.json_dict.get("in_reply_to_user_id_str", None),
+            #'place': ts["place"],
+            'source': self.json_dict["source"],
+            'text': self.json_dict["text"],
+            'truncated': False,            
+            'tweet_source_id' : self.source_id,
+        }
+        
+        #user
+    
+        user_fields = {
+            'lang' : self.json_dict.get('iso_language_code',None),
+            'profile_image_url' : self.json_dict["profile_image_url"],
+            'screen_name' : self.json_dict["from_user"],
+            'id' : self.json_dict["from_user_id"],
+            'id_str' : self.json_dict["from_user_id_str"],
+            'name' : self.json_dict['from_user_name'],
+        }
+        
+        user = self.__get_user(user_fields, do_merge=False)
+        if user is None:
+            self.logger.warning("USER not found " + repr(user_fields)) #@UndefinedVariable
+            tweet_fields["user_id"] = None
+        else:
+            tweet_fields["user_id"] = user.id
+        
+        tweet_fields = adapt_fields(tweet_fields, fields_adapter["rest"]["tweet"])
+        self.tweet = self.obj_buffer.add_object(Tweet, None, tweet_fields, True)
+                
+        self.__process_entities()
+
+
+
+    def process_source(self):
+                
+        status_id = self.json_dict["id"]
+        log = self.session.query(TweetLog).filter(TweetLog.status_id==status_id).first()
+        if log:
+            self.obj_buffer.add_object(TweetLog, log, {'status': TweetLog.TWEET_STATUS['DELETE'], 'status_id': None})
+            self.session.query(TweetSource).filter(TweetSource.id==self.source_id).delete()
+        else:
+            if "metadata" in self.json_dict:
+                self.__process_twitter_rest()
+            else:
+                self.__process_twitter_stream()
+
+        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['OK']}, True)
+
+    def log_info(self):
+        screen_name = self.json_dict.get("user",{}).get("screen_name","")
+        return u"Process Tweet from %s : %s" % (screen_name, self.json_dict.get('text',u""))
+
+
+
+class TwitterProcessorDelete(TwitterProcessor):
+    """
+    {
+      "delete":{
+        "status":{
+            "id":1234,
+            "id_str":"1234",
+            "user_id":3,
+            "user_id_str":"3"
+        }
+      }
+    }
+    """
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+
+    def process(self):
+                   
+        #find tweet
+        tweet_id = self.json_dict.get('delete',{}).get('status',{}).get('id',None)
+        if tweet_id:
+            t = self.session.query(Tweet).options(joinedload(Tweet.tweet_source)).filter(Tweet.id == tweet_id).first()
+            if t:
+                tsource = t.tweet_source                
+                self.session.delete(t)
+                self.session.query(TweetLog).filter(TweetLog.tweet_source_id == tsource.id).delete()
+                self.session.delete(tsource)
+                self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['DELETE']}, True)
+            else:
+                self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status_id': tweet_id,'status':TweetLog.TWEET_STATUS['DELETE_PENDING']}, True)
+                
+    def log_info(self):
+        status_del = self.json_dict.get('delete', {}).get("status",{})
+        return u"Process delete for %s : %s" % (status_del.get('user_id_str',u""), status_del.get('id_str',u""))
+
+class TwitterProcessorScrubGeo(TwitterProcessor):
+    """
+    {
+        "scrub_geo":{
+        "user_id":14090452,
+        "user_id_str":"14090452",
+        "up_to_status_id":23260136625,
+        "up_to_status_id_str":"23260136625"
+      }
+    }
+    """
+
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+    
+    def process_source(self):        
+        up_to_status_id = self.json_dict.get("scrub_geo", {}).get("up_to_status_id", None)
+        if not up_to_status_id:
+            return
+        tweets = self.session.query(Tweet).options(joinedload(Tweet.tweet_source)).filter(Tweet.id <= up_to_status_id)
+        for t in tweets:
+            self.obj_buffer.add_object(Tweet, t, {'geo': None})
+            tsource = t.tweet_source
+            tsource_dict = anyjson.deserialize(tsource.original_json)
+            if tsource_dict.get("geo", None):
+                tsource_dict["geo"] = None
+                self.obj_buffer.add_object(TweetSource, tsource, {'original_json': anyjson.serialize(tsource_dict)})
+        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['SCRUB_GEO']}, True)
+    
+    def log_info(self):
+        return u"Process scrub geo for %s : %s" % (self.json_dict["scrub_geo"].get('user_id_str',u""), self.json_dict["scrub_geo"].get('id_str',u""))
+
+
+class TwitterProcessorLimit(TwitterProcessor):
+    """
+    {
+      "limit":{
+        "track":1234
+      }
+    }
+    """    
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+
+    def process_source(self):
+        """
+        do nothing, just log the information
+        """    
+        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['LIMIT'], 'error':self.json_txt}, True)
+        
+    def log_info(self):
+        return u"Process limit %d " % self.json_dict.get("limit", {}).get('track', 0)
+        
+class TwitterProcessorStatusWithheld(TwitterProcessor):
+    """
+    {
+      "status_withheld":{
+      "id":1234567890,
+      "user_id":123456,
+      "withheld_in_countries":["DE", "AR"]
+      }
+    }
+    """
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+    
+    def process_source(self):
+        """
+        do nothing, just log the information
+        """
+        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['STATUS_WITHHELD'], 'error':self.json_txt}, True)
+        
+    def log_info(self):
+        status_withheld = self.json_dict.get("status_withheld",{})
+        return u"Process status withheld status id %d from user %d in countries %s" %(status_withheld.get("id",0), status_withheld.get("user_id",0), u",".join(status_withheld.get("withheld_in_countries",[])))
+
+class TwitterProcessorUserWithheld(TwitterProcessor):
+    """
+    {  
+      "user_withheld":{
+        "id":123456,
+        "withheld_in_countries":["DE","AR"]
+      }
+    }
+    """
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+    
+    def process_source(self):
+        """
+        do nothing, just log the information
+        """
+        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['USER_WITHHELD'], 'error':self.json_txt}, True)
+
+
+    def log_info(self):
+        user_withheld = self.json_dict.get("user_withheld", {})
+        return u"Process user withheld %d in countries %s" % (user_withheld.get("id",0), u"".join(user_withheld.get("withheld_in_countries",[])))
+
+class TwitterProcessorDisconnect(TwitterProcessor):
+    """
+    {
+      "disconnect":{
+        "code": 4,
+        "stream_name":"< A stream identifier >",
+        "reason":"< Human readable status message >"
+      }
+    }
+    """
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+    
+    def process_source(self):
+        """
+        do nothing, just log the information
+        """
+        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['DISCONNECT'], 'error':self.json_txt}, True)
+
+    def log_info(self):
+        disconnect = self.json_dict.get("disconnect",{})
+        return u"Process disconnect stream %s code %d reason %s" % (disconnect.get("stream_name",""), disconnect.get("code",0), disconnect.get("reason",""))
+
+class TwitterProcessorStallWarning(TwitterProcessor):
+    """
+    {
+      "warning":{
+        "code":"FALLING_BEHIND",
+        "message":"Your connection is falling behind and messages are being queued for delivery to you. Your queue is now over 60% full. You will be disconnected when the queue is full.",
+        "percent_full": 60
+      }
+    }
+    """
+    def __init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=None, token_filename=None, user_query_twitter=False, logger=None):
+        TwitterProcessor.__init__(self, json_dict, json_txt, source_id, session, consumer_token, access_token=access_token, token_filename=token_filename, user_query_twitter=user_query_twitter, logger=logger)
+
+    def process_source(self):
+        """
+        do nothing, just log the information
+        """
+        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['STALL_WARNING'], 'error':self.json_txt}, True)
+
+    def log_info(self):
+        warning = self.json_dict.get("warning",{})
+        return u"Process stall warning %d%% code %s, message %s" % (warning.get("percent_full",0),warning.get("code",u""), warning.get("message", u""))
+
+TWEET_PROCESSOR_MAP = {
+    'text': TwitterProcessorStatus,
+    'delete': TwitterProcessorDelete,
+    'scrub_geo': TwitterProcessorScrubGeo,
+    'limit': TwitterProcessorLimit,
+    'status_withheld': TwitterProcessorStatusWithheld,
+    'user_withheld': TwitterProcessorUserWithheld,
+    'disconnect': TwitterProcessorDisconnect,
+    'warning': TwitterProcessorStallWarning 
+}
+
+def get_processor(tweet_dict):
+    for processor_key,processor_klass in TWEET_PROCESSOR_MAP.iteritems():
+        if processor_key in tweet_dict:
+            return processor_klass
+    return None
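Since the TWEET_PROCESSOR_MAP keys mirror the top-level keys of streaming API messages, dispatching a raw message is a dictionary lookup. A consumer sketch (session and consumer_token are placeholders; note that the base class raises TwitterProcessorException when a payload lacks a top-level id, which can affect non-status messages):

    import anyjson
    from iri_tweet.processor import get_processor, TwitterProcessorException

    def handle_message(json_txt, session, consumer_token):
        msg = anyjson.deserialize(json_txt)
        klass = get_processor(msg)
        if klass is None:
            return None  # unknown message type, ignore
        try:
            processor = klass(msg, json_txt, None, session, consumer_token)
        except TwitterProcessorException:
            return None  # malformed payload
        processor.process()
        return processor.log_info()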
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/script/lib/iri_tweet/iri_tweet/stream.py	Fri May 10 13:34:40 2013 +0200
@@ -0,0 +1,399 @@
+# -*- coding: utf-8 -*-
+'''
+Created on Mar 22, 2012
+
+@author: ymh
+
+Module directly inspired by tweetstream
+
+'''
+import time
+import requests
+from requests.utils import stream_decode_response_unicode
+import anyjson
+import select
+
+from . import  USER_AGENT, ConnectionError, AuthenticationError
+
+
+def iter_content_non_blocking(req, max_chunk_size=4096, decode_unicode=False, timeout=-1):
+    
+    if req._content_consumed:
+        raise RuntimeError(
+            'The content for this response was already consumed'
+        )
+    
+    req.raw._fp.fp._sock.setblocking(False)
+    
+    def generate():
+        chunk_size = 1        
+        while True:
+            if timeout < 0:
+                rlist,_,_ = select.select([req.raw._fp.fp._sock], [], [])
+            else:
+                rlist,_,_ = select.select([req.raw._fp.fp._sock], [], [], timeout)
+                
+            if not rlist:                 
+                continue
+            
+            try:
+                chunk = req.raw.read(chunk_size, decode_content=True)            
+                if not chunk:
+                    break
+                if len(chunk) >= chunk_size and chunk_size < max_chunk_size:
+                    chunk_size = min(chunk_size*2, max_chunk_size)
+                elif len(chunk) < chunk_size/2 and chunk_size < max_chunk_size:
+                    chunk_size = max(chunk_size/2,1)
+                yield chunk
+            except requests.exceptions.SSLError as e:
+                if e.errno == 2:
+                    # Apparently this means there was nothing in the socket buf
+                    pass
+                else:
+                    raise                
+            
+        req._content_consumed = True
+
+    gen = generate()
+
+    if decode_unicode:
+        gen = stream_decode_response_unicode(gen, req)
+
+    return gen
+
+    
+    
+
+class BaseStream(object):
+
+    """A network connection to Twitters streaming API
+
+    :param auth: requests auth object.
+    :keyword raw: If True, return each tweet's raw data direct from the socket,
+      without UTF8 decoding or parsing, rather than a parsed object. The
+      default is False.
+    :keyword timeout: If non-None, set a timeout in seconds on the receiving
+      socket. Certain types of network problems (e.g., disconnecting a VPN)
+      can cause the connection to hang, leading to indefinite blocking that
+      requires kill -9 to resolve. Setting a timeout leads to an orderly
+      shutdown in these cases. The default is None (i.e., no timeout).
+    :keyword url: Endpoint URL for the object. Note: you should not
+      need to edit this. It's present to make testing easier.
+
+    .. attribute:: connected
+
+        True if the object is currently connected to the stream.
+
+    .. attribute:: url
+
+        The URL to which the object is connected
+
+    .. attribute:: starttime
+
+        The timestamp, in seconds since the epoch, the object connected to the
+        streaming api.
+
+    .. attribute:: count
+
+        The number of tweets that have been returned by the object.
+
+    .. attribute:: rate
+
+        The rate at which tweets have been returned from the object as a
+        float. See also :attr:`rate_period`.
+
+    .. attribute:: rate_period
+
+        The amount of time over which tweets are sampled to calculate the
+        tweet rate. By default 10 seconds. Changes to this attribute will
+        not be reflected until the next time the rate is calculated. The
+        rate of tweets varies with time of day etc., so it's useful to set
+        this to something sensible.
+
+    .. attribute:: user_agent
+
+        User agent string that will be included in the request. NOTE: this
+        cannot be changed after the connection has been made. This property
+        must thus be set before accessing the iterator. The default is set
+        in :attr:`USER_AGENT`.
+    """
+
+    def __init__(self, auth,
+                  raw=False, timeout=-1, url=None, compressed=False, chunk_size=4096, logger=None):
+        self._conn = None
+        self._rate_ts = None
+        self._rate_cnt = 0
+        self._auth = auth
+        self.raw_mode = raw
+        self.timeout = timeout
+        self._compressed = compressed
+
+        self.rate_period = 10  # in seconds
+        self.connected = False
+        self.starttime = None
+        self.count = 0
+        self.rate = 0
+        self.user_agent = USER_AGENT
+        self.chunk_size = chunk_size
+        if url: self.url = url
+        
+        self.muststop = False
+        self._logger = logger
+        
+        self._iter = self.__iter__()
+         
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *params):
+        self.close()
+        return False
+
+    def _init_conn(self):
+        """Open the connection to the twitter server"""
+        
+        if self._logger : self._logger.debug("BaseStream Open the connection to the twitter server")
+        
+        headers = {'User-Agent': self.user_agent}
+        
+        if self._compressed:
+            headers['Accept-Encoding'] = "deflate, gzip"
+
+        postdata = self._get_post_data() or {}
+        postdata['stall_warnings'] = 'true'
+        
+        if self._logger : self._logger.debug("BaseStream init connection url " + repr(self.url))
+        if self._logger : self._logger.debug("BaseStream init connection headers " + repr(headers))
+        if self._logger : self._logger.debug("BaseStream init connection data " + repr(postdata))
+        
+        self._resp = requests.post(self.url, auth=self._auth, headers=headers, data=postdata, stream=True)
+        if self._logger : self._logger.debug("BaseStream init connection " + repr(self._resp))
+        
+        self._resp.raise_for_status()
+        self.connected = True
+
+        if not self._rate_ts:
+            self._rate_ts = time.time()
+        if not self.starttime:
+            self.starttime = time.time()
+
+
+    def _get_post_data(self):
+        """Subclasses that need to add post data to the request can override
+        this method and return post data. The data should be in the format
+        returned by urllib.urlencode."""
+        return None
+
+    def testmuststop(self):
+        if callable(self.muststop):
+            return self.muststop()
+        else:
+            return self.muststop
+    
+    def _update_rate(self):
+        rate_time = time.time() - self._rate_ts
+        if not self._rate_ts or rate_time > self.rate_period:
+            self.rate = self._rate_cnt / rate_time
+            self._rate_cnt = 0
+            self._rate_ts = time.time()
+            
+    def _iter_object(self):
+   
+#        for line in self._resp.iter_lines():
+#            yield line     
+#         pending = None
+# 
+#         for chunk in self._resp.iter_content(chunk_size=self.chunk_size, decode_unicode=None):
+# 
+#             if pending is not None:
+#                 chunk = pending + chunk 
+#             lines = chunk.splitlines()
+# 
+#             if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+#                 pending = lines.pop()
+#             else:
+#                 pending = None
+# 
+#             for line in lines:
+#                 yield line
+# 
+#         if pending is not None:
+#             yield pending
+            
+        pending = None
+        has_stopped = False
+         
+        if self._logger : self._logger.debug("BaseStream _iter_object")
+ 
+        for chunk in self._resp.iter_content(
+            chunk_size=self.chunk_size,
+            decode_unicode=None):
+ 
+            if self._logger : self._logger.debug("BaseStream _iter_object loop")
+            if self.testmuststop():
+                has_stopped = True
+                break
+ 
+            if pending is not None:
+                chunk = pending + chunk
+            lines = chunk.split('\r')
+ 
+            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+                pending = lines.pop()
+            else:
+                pending = None
+ 
+            for line in lines:
+                yield line.strip('\n')
+ 
+            if self.testmuststop():
+                has_stopped = True
+                break
+ 
+        if pending is not None:
+            yield pending
+        if has_stopped:
+            raise StopIteration()
+            
+    def __iter__(self):
+        
+        if self._logger : self._logger.debug("BaseStream __iter__")
+        if not self.connected:
+            if self._logger : self._logger.debug("BaseStream __iter__ not connected, connecting")
+            self._init_conn()
+
+        if self._logger : self._logger.debug("BaseStream __iter__ connected")
+        
+        for line in self._iter_object():
+
+            if self._logger : self._logger.debug("BaseStream __iter__ line %s " % repr(line))
+            
+            if not line:
+                continue
+
+            if (self.raw_mode):
+                tweet = line
+            else:
+                line = line.decode("utf8")
+                try:
+                    tweet = anyjson.deserialize(line)
+                except ValueError:
+                    self.close()
+                    raise ConnectionError("Got invalid data from twitter", details=line)
+            if 'text' in tweet:
+                self.count += 1
+                self._rate_cnt += 1
+            self._update_rate()
+            yield tweet
+
+
+    def next(self):
+        """Return the next available tweet. This call is blocking!"""
+        return self._iter.next()
+
+
+    def close(self):
+        """
+        Close the connection to the streaming server.
+        """
+        self.connected = False
+
+
+class FilterStream(BaseStream):
+    url = "https://stream.twitter.com/1.1/statuses/filter.json"
+
+    def __init__(self, auth, follow=None, locations=None,
+                 track=None, url=None, raw=False, timeout=None, compressed=False, chunk_size=requests.models.ITER_CHUNK_SIZE, logger=None):
+        self._follow = follow
+        self._locations = locations
+        self._track = track
+        # remove follow, locations, track
+        BaseStream.__init__(self, auth, url=url, raw=raw, timeout=timeout, compressed=compressed, chunk_size=chunk_size, logger=logger)
+
+    def _get_post_data(self):
+        postdata = {}
+        if self._follow: postdata["follow"] = ",".join([str(e) for e in self._follow])
+        if self._locations: postdata["locations"] = ",".join(self._locations)
+        if self._track: postdata["track"] = ",".join(self._track)
+        return postdata
+
+
+class SafeStreamWrapper(object):
+    
+    def __init__(self, base_stream, logger=None, error_cb=None, max_reconnects=-1, initial_tcp_wait=250, initial_http_wait=5000, max_wait=240000):
+        self._stream = base_stream
+        self._logger = logger
+        self._error_cb = error_cb
+        self._max_reconnects = max_reconnects
+        self._initial_tcp_wait = initial_tcp_wait
+        self._initial_http_wait = initial_http_wait
+        self._max_wait = max_wait
+        self._retry_wait = 0
+        self._retry_nb = 0
+        self._reconnects = 0
+
+    def __post_process_error(self,e):
+        # Note: error_cb is not called on the last error since we
+        # raise a ConnectionError instead
+        if  callable(self._error_cb):
+            self._error_cb(e)
+        if self._logger: self._logger.info("stream sleeping for %d ms " % self._retry_wait)
+        time.sleep(float(self._retry_wait)/1000.0)
+        
+        
+    def __process_tcp_error(self,e):
+        if self._logger: self._logger.debug("connection error type :" + repr(type(e)))
+        if self._logger: self._logger.debug("connection error :" + repr(e))
+        
+        self._reconnects += 1
+        if self._max_reconnects >= 0 and self._reconnects > self._max_reconnects:
+            raise ConnectionError("Too many retries")
+        if self._retry_wait < self._max_wait:
+            self._retry_wait += self._initial_tcp_wait
+            if self._retry_wait > self._max_wait:
+                self._retry_wait = self._max_wait
+        
+        self.__post_process_error(e)
+
+        
+    def __process_http_error(self,e):
+        if self._logger: self._logger.debug("http error type %s" % (repr(type(e))))
+        if self._logger: self._logger.debug("http error on %s : %s" % (e.response.url,e.message))
+
+        if self._retry_wait < self._max_wait:
+            self._retry_wait = 2*self._retry_wait if self._retry_wait > 0 else self._initial_http_wait
+            if self._retry_wait > self._max_wait:
+                self._retry_wait = self._max_wait
+        
+        self.__post_process_error(e)
+        
+    def __iter__(self):
+        while not self._stream.testmuststop():
+            self._retry_nb += 1
+            try:
+                if self._logger: self._logger.debug("inner loop")
+                for tweet in self._stream:
+                    if self._logger: self._logger.debug("tweet : " + repr(tweet))
+                    self._reconnects = 0
+                    self._retry_wait = 0
+                    if not tweet.strip():
+                        if self._logger: self._logger.debug("Empty Tweet received : PING")
+                        continue
+                    yield tweet
+            except requests.exceptions.HTTPError as e:
+                if e.response.status_code == 401:
+                    if self._logger: self._logger.debug("SafeStreamWrapper Connection Error http error on %s : %s" % (e.response.url,e.message))
+                    raise AuthenticationError("Error connecting to %s : %s : %s - %s" % (e.response.url,e.message, repr(e.response.headers),repr(e.response.text)))
+                if e.response.status_code > 200:
+                    self.__process_http_error(e)
+                else:
+                    self.__process_tcp_error(e)
+            except (ConnectionError, requests.exceptions.ConnectionError, requests.exceptions.Timeout, requests.exceptions.RequestException) as e:
+                self.__process_tcp_error(e)
+
+        
+    
+    
\ No newline at end of file
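FilterStream opens the filtered statuses endpoint and SafeStreamWrapper layers the reconnect and backoff policy on top. A usage sketch (auth must be a requests-compatible OAuth signer, which this module does not provide, and the track keyword is a placeholder; raw=True matters because the wrapper calls strip() on each item to skip keep-alive lines, so it expects strings):

    from iri_tweet.stream import FilterStream, SafeStreamWrapper

    # auth: assumed to be something like requests_oauthlib.OAuth1(
    #     consumer_key, consumer_secret, access_token_key, access_token_secret)
    stream = FilterStream(auth, track=["polemictweet"], raw=True)
    for line in SafeStreamWrapper(stream, max_reconnects=5):
        # each non-empty line is one JSON-encoded stream message
        print line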
--- a/script/lib/iri_tweet/iri_tweet/tests.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/lib/iri_tweet/iri_tweet/tests.py	Fri May 10 13:34:40 2013 +0200
@@ -3,10 +3,14 @@
 from sqlalchemy.orm import relationship, backref
 import unittest #@UnresolvedImport
 from sqlalchemy.orm import sessionmaker
-from iri_tweet.utils import ObjectsBuffer, TwitterProcessor
+from iri_tweet.utils import ObjectsBuffer
+from iri_tweet.processor import TwitterProcessorStatus
 from iri_tweet import models
 import tempfile #@UnresolvedImport
 import os
+import logging
+
+logger = logging.getLogger(__name__)
 
 Base = declarative_base()
 
@@ -129,12 +133,12 @@
     def setUp(self):
         self.engine, self.metadata, sessionMaker = models.setup_database('sqlite:///:memory:', echo=True)
         self.session = sessionMaker()
-        file, self.tmpfilepath = tempfile.mkstemp()
-        os.close(file)
+        tmpfile, self.tmpfilepath = tempfile.mkstemp()
+        os.close(tmpfile)
 
 
     def testTwitterProcessor(self):
-        tp = TwitterProcessor(None, original_json, None, self.session, self.tmpfilepath)
+        tp = TwitterProcessorStatus(None, original_json, None, self.session, self.tmpfilepath, logger)
         tp.process()
         self.session.commit()
         
@@ -154,7 +158,7 @@
         
 
     def testTwitterProcessorMedia(self):
-        tp = TwitterProcessor(None, original_json_media, None, self.session, self.tmpfilepath)
+        tp = TwitterProcessorStatus(None, original_json_media, None, self.session, self.tmpfilepath, logger)
         tp.process()
         self.session.commit()
         
@@ -174,7 +178,7 @@
 
 
     def testTwitterProcessorMediaOthers(self):
-        tp = TwitterProcessor(None, original_json_media_others, None, self.session, self.tmpfilepath)
+        tp = TwitterProcessorStatus(None, original_json_media_others, None, self.session, self.tmpfilepath, logger)
         tp.process()
         self.session.commit()
         
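The rewritten tests drive TwitterProcessorStatus against an in-memory SQLite database, so they need no network access. A sketch of running them with the stdlib runner (assuming iri_tweet is importable):

    import unittest
    from iri_tweet import tests

    suite = unittest.defaultTestLoader.loadTestsFromModule(tests)
    unittest.TextTestRunner(verbosity=2).run(suite)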
--- a/script/lib/iri_tweet/iri_tweet/utils.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/lib/iri_tweet/iri_tweet/utils.py	Fri May 10 13:34:40 2013 +0200
@@ -1,25 +1,22 @@
-from models import (Tweet, User, Hashtag, EntityHashtag, EntityUser, Url, 
-    EntityUrl, CONSUMER_KEY, CONSUMER_SECRET, APPLICATION_NAME, ACCESS_TOKEN_KEY, 
-    ACCESS_TOKEN_SECRET, adapt_date, adapt_json, TweetSource, TweetLog, MediaType, 
-    Media, EntityMedia, Entity, EntityType)
-from sqlalchemy.sql import select, or_ #@UnresolvedImport
-import Queue #@UnresolvedImport
-import anyjson #@UnresolvedImport
+from models import (Tweet, User, Hashtag, EntityHashtag, APPLICATION_NAME, ACCESS_TOKEN_SECRET, adapt_date, adapt_json, 
+    ACCESS_TOKEN_KEY)
+from sqlalchemy.sql import select, or_
+import Queue
 import codecs
 import datetime
 import email.utils
 import logging
 import math
 import os.path
+import socket
 import sys
-import twitter.oauth #@UnresolvedImport
-import twitter.oauth_dance #@UnresolvedImport
-import twitter_text #@UnresolvedImport
+import twitter.oauth
+import twitter.oauth_dance
 
 
 CACHE_ACCESS_TOKEN = {}
 
-def get_oauth_token(token_file_path=None, check_access_token=True, application_name=APPLICATION_NAME, consumer_key=CONSUMER_KEY, consumer_secret=CONSUMER_SECRET):
+def get_oauth_token(consumer_key, consumer_secret, token_file_path=None, check_access_token=True, application_name=APPLICATION_NAME):
     
     global CACHE_ACCESS_TOKEN
 
@@ -34,24 +31,27 @@
     
     if res is not None and check_access_token:
         get_logger().debug("get_oauth_token : Check oauth tokens") #@UndefinedVariable
-        t = twitter.Twitter(auth=twitter.OAuth(res[0], res[1], CONSUMER_KEY, CONSUMER_SECRET))
+        t = twitter.Twitter(auth=twitter.OAuth(res[0], res[1], consumer_key, consumer_secret))
         status = None
         try:
-            status = t.account.rate_limit_status()
+            status = t.application.rate_limit_status(resources="account")
         except Exception as e:
-            get_logger().debug("get_oauth_token : error getting rate limit status %s" % repr(e))
+            get_logger().debug("get_oauth_token : error getting rate limit status %s " % repr(e))            
+            get_logger().debug("get_oauth_token : error getting rate limit status %s " % str(e))
             status = None
         get_logger().debug("get_oauth_token : Check oauth tokens : status %s" % repr(status)) #@UndefinedVariable
-        if status is None or status['remaining_hits'] == 0:
+        if status is None or status.get("resources",{}).get("account",{}).get('/account/verify_credentials',{}).get('remaining',0) == 0:
             get_logger().debug("get_oauth_token : Problem with status %s" % repr(status))
             res = None
 
     if res is None:
         get_logger().debug("get_oauth_token : doing the oauth dance")
         res = twitter.oauth_dance(application_name, consumer_key, consumer_secret, token_file_path)
+        
     
     CACHE_ACCESS_TOKEN[application_name] = res
     
+    get_logger().debug("get_oauth_token : done got %s" % repr(res))
     return res
 
 def parse_date(date_str):
@@ -59,7 +59,7 @@
     return datetime.datetime(*ts[0:7])
 
 def clean_keys(dict_val):
-    return dict([(str(key),value) for key,value in dict_val.items()])
+    return dict([(str(key),value) for key,value in dict_val.iteritems()])
 
 fields_adapter = {
     'stream': {
@@ -100,7 +100,7 @@
             return adapter_mapping[field](value)
         else:
             return value
-    return dict([(str(k),adapt_one_field(k,v)) for k,v in fields_dict.items()])    
+    return dict([(str(k),adapt_one_field(k,v)) for k,v in fields_dict.iteritems()])    
 
 
 class ObjectBufferProxy(object):
@@ -113,7 +113,7 @@
         
     def persists(self, session):
         new_args = [arg() if callable(arg) else arg for arg in self.args] if self.args is not None else []
-        new_kwargs = dict([(k,v()) if callable(v) else (k,v) for k,v in self.kwargs.items()]) if self.kwargs is not None else {}
+        new_kwargs = dict([(k,v()) if callable(v) else (k,v) for k,v in self.kwargs.iteritems()]) if self.kwargs is not None else {}
         
         if self.instance is None:
             self.instance = self.klass(*new_args, **new_kwargs)
@@ -160,7 +160,7 @@
                 if proxy.kwargs is None or len(proxy.kwargs) == 0 or proxy.klass != klass:
                     continue
                 found = True
-                for k,v in kwargs.items():
+                for k,v in kwargs.iteritems():
                     if (k not in proxy.kwargs) or v != proxy.kwargs[k]:
                         found = False
                         break
@@ -168,301 +168,6 @@
                     return proxy        
         return None
                 
-class TwitterProcessorException(Exception):
-    pass
-
-class TwitterProcessor(object):
-    
-    def __init__(self, json_dict, json_txt, source_id, session, access_token=None, token_filename=None, user_query_twitter=False):
-
-        if json_dict is None and json_txt is None:
-            raise TwitterProcessorException("No json")
-        
-        if json_dict is None:
-            self.json_dict = anyjson.deserialize(json_txt)
-        else:
-            self.json_dict = json_dict
-        
-        if not json_txt:
-            self.json_txt = anyjson.serialize(json_dict)
-        else:
-            self.json_txt = json_txt
-        
-        if "id" not in self.json_dict:
-            raise TwitterProcessorException("No id in json")
-        
-        self.source_id = source_id
-        self.session = session
-        self.token_filename = token_filename
-        self.access_token = access_token
-        self.obj_buffer = ObjectsBuffer()
-        self.user_query_twitter = user_query_twitter  
-        
-
-
-    def __get_user(self, user_dict, do_merge):
-        get_logger().debug("Get user : " + repr(user_dict)) #@UndefinedVariable
-        
-        user_dict = adapt_fields(user_dict, fields_adapter["stream"]["user"])
-    
-        user_id = user_dict.get("id",None)    
-        user_name = user_dict.get("screen_name", user_dict.get("name", None))
-        
-        if user_id is None and user_name is None:
-            return None
-
-        user = None
-        if user_id:
-            user = self.obj_buffer.get(User, id=user_id)
-        else:
-            user = self.obj_buffer.get(User, screen_name=user_name)
-
-        #to do update user id needed            
-        if user is not None:
-            user_created_at = None
-            if user.args is not None:
-                user_created_at = user.args.get('created_at', None)
-            if user_created_at is None and user_dict.get('created_at', None) is not None and do_merge:
-                if user.args is None:
-                    user.args = user_dict
-                else:
-                    user.args.update(user_dict)
-            return user
-
-        #todo : add methpds to objectbuffer to get buffer user
-        user_obj = None
-        if user_id:
-            user_obj = self.session.query(User).filter(User.id == user_id).first()
-        else:
-            user_obj = self.session.query(User).filter(User.screen_name.ilike(user_name)).first()
-    
-        #todo update user if needed
-        if user_obj is not None:            
-            if user_obj.created_at is not None or user_dict.get('created_at', None) is None or not do_merge :
-                user = ObjectBufferProxy(User, None, None, False, user_obj)
-            else:
-                user = self.obj_buffer.add_object(User, None, user_dict, True, user_obj)
-            return user
-    
-        user_created_at = user_dict.get("created_at", None)
-        
-        if user_created_at is None and self.user_query_twitter:
-            
-            if self.access_token is not None:
-                acess_token_key, access_token_secret = self.access_token
-            else:
-                acess_token_key, access_token_secret = get_oauth_token(self.token_filename)
-            t = twitter.Twitter(auth=twitter.OAuth(acess_token_key, access_token_secret, CONSUMER_KEY, CONSUMER_SECRET))
-            try:
-                if user_id:
-                    user_dict = t.users.show(user_id=user_id)
-                else:
-                    user_dict = t.users.show(screen_name=user_name)            
-            except Exception as e:
-                get_logger().info("get_user : TWITTER ERROR : " + repr(e)) #@UndefinedVariable
-                get_logger().info("get_user : TWITTER ERROR : " + str(e)) #@UndefinedVariable
-                return None
-            
-        if "id" not in user_dict:
-            return None
-        
-        #TODO filter get, wrap in proxy
-        user_obj = self.session.query(User).filter(User.id == user_dict["id"]).first()
-        
-        if user_obj is not None and not do_merge:
-            return ObjectBufferProxy(User, None, None, False, user_obj)
-        else:        
-            return self.obj_buffer.add_object(User, None, user_dict, True)        
-
-    def __get_or_create_object(self, klass, filter_by_kwargs, filter_arg, creation_kwargs, must_flush, do_merge):
-        
-        obj_proxy = self.obj_buffer.get(klass, **filter_by_kwargs)
-        if obj_proxy is None:
-            query = self.session.query(klass)
-            if filter_arg is not None:
-                query = query.filter(filter_arg)
-            else:
-                query = query.filter_by(**filter_by_kwargs)
-            obj_instance = query.first()
-            if obj_instance is not None:
-                if not do_merge:
-                    obj_proxy = ObjectBufferProxy(klass, None, None, False, obj_instance)
-                else:
-                    obj_proxy = self.obj_buffer.add_object(klass, None, creation_kwargs, must_flush, obj_instance)
-        if obj_proxy is None:
-            obj_proxy = self.obj_buffer.add_object(klass, None, creation_kwargs, must_flush)
-        return obj_proxy
-
-
-    def __process_entity(self, ind, ind_type):
-        get_logger().debug("Process_entity : " + repr(ind) + " : " + repr(ind_type)) #@UndefinedVariable
-        
-        ind = clean_keys(ind)
-        
-        entity_type = self.__get_or_create_object(EntityType, {'label':ind_type}, None, {'label':ind_type}, True, False)
-        
-        entity_dict = {
-           "indice_start"   : ind["indices"][0],
-           "indice_end"     : ind["indices"][1],
-           "tweet_id"       : self.tweet.id,
-           "entity_type_id" : entity_type.id,
-           "source"         : adapt_json(ind)
-        }
-
-        def process_medias():
-            
-            media_id = ind.get('id', None)
-            if media_id is None:
-                return None, None
-            
-            type_str = ind.get("type", "photo")
-            media_type = self.__get_or_create_object(MediaType, {'label': type_str}, None, {'label':type_str}, True, False)
-            media_ind = adapt_fields(ind, fields_adapter["entities"]["medias"])
-            if "type" in media_ind:
-                del(media_ind["type"])
-            media_ind['type_id'] = media_type.id            
-            media = self.__get_or_create_object(Media, {'id':media_id}, None, media_ind, True, False)
-            
-            entity_dict['media_id'] = media.id
-            return EntityMedia, entity_dict
-
-        def process_hashtags():
-            text = ind.get("text", ind.get("hashtag", None))
-            if text is None:
-                return None, None
-            ind['text'] = text
-            hashtag = self.__get_or_create_object(Hashtag, {'text':text}, Hashtag.text.ilike(text), ind, True, False)
-            entity_dict['hashtag_id'] = hashtag.id
-            return EntityHashtag, entity_dict             
-        
-        def process_user_mentions():
-            user_mention = self.__get_user(ind, False)
-            if user_mention is None:
-                entity_dict['user_id'] = None
-            else:
-                entity_dict['user_id'] = user_mention.id
-            return EntityUser, entity_dict
-        
-        def process_urls():
-            url = self.__get_or_create_object(Url, {'url':ind["url"]}, None, ind, True, False)
-            entity_dict['url_id'] = url.id
-            return EntityUrl, entity_dict
-                
-        #{'': lambda }
-        entity_klass, entity_dict =  { 
-            'hashtags': process_hashtags,
-            'user_mentions' : process_user_mentions,
-            'urls' : process_urls,
-            'media': process_medias,
-            }.get(ind_type, lambda: (Entity, entity_dict))()
-            
-        get_logger().debug("Process_entity entity_dict: " + repr(entity_dict)) #@UndefinedVariable
-        if entity_klass:
-            self.obj_buffer.add_object(entity_klass, None, entity_dict, False)
-
-
-    def __process_twitter_stream(self):
-        
-        tweet_nb = self.session.query(Tweet).filter(Tweet.id == self.json_dict["id"]).count()
-        if tweet_nb > 0:
-            return
-        
-        ts_copy = adapt_fields(self.json_dict, fields_adapter["stream"]["tweet"])
-        
-        # get or create user
-        user = self.__get_user(self.json_dict["user"], True)
-        if user is None:
-            get_logger().warning("USER not found " + repr(self.json_dict["user"])) #@UndefinedVariable
-            ts_copy["user_id"] = None
-        else:
-            ts_copy["user_id"] = user.id
-            
-        del(ts_copy['user'])
-        ts_copy["tweet_source_id"] = self.source_id
-        
-        self.tweet = self.obj_buffer.add_object(Tweet, None, ts_copy, True)
-            
-        self.__process_entities()
-
-
-    def __process_entities(self):
-        if "entities" in self.json_dict:
-            for ind_type, entity_list in self.json_dict["entities"].items():
-                for ind in entity_list:
-                    self.__process_entity(ind, ind_type)
-        else:
-            
-            text = self.tweet.text
-            extractor = twitter_text.Extractor(text)
-            for ind in extractor.extract_hashtags_with_indices():
-                self.__process_entity(ind, "hashtags")
-            
-            for ind in extractor.extract_urls_with_indices():
-                self.__process_entity(ind, "urls")
-            
-            for ind in extractor.extract_mentioned_screen_names_with_indices():
-                self.__process_entity(ind, "user_mentions")
-
-    def __process_twitter_rest(self):
-        tweet_nb = self.session.query(Tweet).filter(Tweet.id == self.json_dict["id"]).count()
-        if tweet_nb > 0:
-            return
-        
-        
-        tweet_fields = {
-            'created_at': self.json_dict["created_at"], 
-            'favorited': False,
-            'id': self.json_dict["id"],
-            'id_str': self.json_dict["id_str"],
-            #'in_reply_to_screen_name': ts["to_user"], 
-            'in_reply_to_user_id': self.json_dict.get("in_reply_to_user_id",None),
-            'in_reply_to_user_id_str': self.json_dict.get("in_reply_to_user_id_str", None),
-            #'place': ts["place"],
-            'source': self.json_dict["source"],
-            'text': self.json_dict["text"],
-            'truncated': False,            
-            'tweet_source_id' : self.source_id,
-        }
-        
-        #user
-    
-        user_fields = {
-            'lang' : self.json_dict.get('iso_language_code',None),
-            'profile_image_url' : self.json_dict["profile_image_url"],
-            'screen_name' : self.json_dict["from_user"],
-            'id' : self.json_dict["from_user_id"],
-            'id_str' : self.json_dict["from_user_id_str"],
-            'name' : self.json_dict['from_user_name'],
-        }
-        
-        user = self.__get_user(user_fields, do_merge=False)
-        if user is None:
-            get_logger().warning("USER not found " + repr(user_fields)) #@UndefinedVariable
-            tweet_fields["user_id"] = None
-        else:
-            tweet_fields["user_id"] = user.id
-        
-        tweet_fields = adapt_fields(tweet_fields, fields_adapter["rest"]["tweet"])
-        self.tweet = self.obj_buffer.add_object(Tweet, None, tweet_fields, True)
-                
-        self.__process_entities()
-
-
-
-    def process(self):
-        
-        if self.source_id is None:
-            tweet_source = self.obj_buffer.add_object(TweetSource, None, {'original_json':self.json_txt}, True)
-            self.source_id = tweet_source.id
-        
-        if "metadata" in self.json_dict:
-            self.__process_twitter_rest()
-        else:
-            self.__process_twitter_stream()
-
-        self.obj_buffer.add_object(TweetLog, None, {'tweet_source_id':self.source_id, 'status':TweetLog.TWEET_STATUS['OK']}, True)
-        
-        self.obj_buffer.persists(self.session)
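For reference, the removed TwitterProcessor was driven as in the surviving call in script/rest/search_twitter.py further below; a minimal sketch, assuming tweet, tweet_str, session and token_filename are set up by the caller:

    # Hedged sketch: tweet is a deserialized status dict, tweet_str its raw
    # JSON; session is an open SQLAlchemy session (assumed set up elsewhere).
    processor = TwitterProcessor(tweet, tweet_str, None, session, None,
                                 token_filename)
    processor.process()   # buffers TweetSource, Tweet, entities, TweetLog
    session.commit()      # caller commits what the ObjectsBuffer persisted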
 
 
 def set_logging(options, plogger=None, queue=None):
@@ -507,12 +212,12 @@
     return logger
 
 def set_logging_options(parser):
-    parser.add_option("-l", "--log", dest="logfile",
+    parser.add_argument("-l", "--log", dest="logfile",
                       help="log to file", metavar="LOG", default="stderr")
-    parser.add_option("-v", dest="verbose", action="count",
-                      help="verbose", metavar="VERBOSE", default=0)
-    parser.add_option("-q", dest="quiet", action="count",
-                      help="quiet", metavar="QUIET", default=0)
+    parser.add_argument("-v", dest="verbose", action="count",
+                      help="verbose", default=0)
+    parser.add_argument("-q", dest="quiet", action="count",
+                      help="quiet", default=0)
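With the migration from optparse's add_option to argparse's add_argument above, callers now pass an argparse.ArgumentParser; a minimal sketch of consuming these options (the flag values are illustrative):

    import argparse

    parser = argparse.ArgumentParser()
    set_logging_options(parser)
    args = parser.parse_args(["-vv", "--log", "run.log"])
    # action="count" accumulates repeats: args.verbose == 2,
    # args.quiet == 0, args.logfile == "run.log"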
 
 def get_base_query(session, query, start_date, end_date, hashtags, tweet_exclude_table, user_whitelist):
     
@@ -561,19 +266,17 @@
 
 def get_logger():
     global logger_name
-    return logging.getLogger(logger_name) #@UndefinedVariable
+    return logging.getLogger(logger_name)
 
 
-# Next two import lines for this demo only
-
-class QueueHandler(logging.Handler): #@UndefinedVariable
+class QueueHandler(logging.Handler):
     """
     This is a logging handler which sends events to a multiprocessing queue.    
     """
 
     def __init__(self, queue, ignore_full):
         """
-        Initialise an instance, using the passed queue.
+        Initialize an instance, using the passed queue.
         """
         logging.Handler.__init__(self) #@UndefinedVariable
         self.queue = queue
@@ -588,10 +291,12 @@
         try:
             ei = record.exc_info
             if ei:
-                dummy = self.format(record) # just to get traceback text into record.exc_text
+                _ = self.format(record) # just to get traceback text into record.exc_text
                 record.exc_info = None  # not needed any more
-            if not self.ignore_full or not self.queue.full():
+            if not self.ignore_full or (not self.queue.full()):
                 self.queue.put_nowait(record)
+        except AssertionError:
+            pass
         except Queue.Full:
             if self.ignore_full:
                 pass
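A sketch of how this handler is typically wired between processes; the queue size, shutdown sentinel and consumer loop are illustrative assumptions, not part of this changeset:

    import logging
    import multiprocessing

    queue = multiprocessing.Queue(maxsize=1000)   # assumed bound
    logging.getLogger().addHandler(QueueHandler(queue, ignore_full=True))

    # In the listening process, drain records back into a real handler:
    while True:
        record = queue.get()
        if record is None:        # assumed shutdown sentinel
            break
        logging.getLogger(record.name).handle(record)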
@@ -615,9 +320,9 @@
     spaces = math.floor(width - marks)
 
     loader = u'[' + (u'=' * int(marks)) + (u' ' * int(spaces)) + u']'
-        
+
     s = u"%s %3d%% %*d/%d - %*s\r" % (loader, percent, len(str(total_line)), current_line, total_line, width, label[:width])
-    
+
     writer.write(s) #takes the header into account
     if percent >= 100:
         writer.write("\n")
@@ -625,3 +330,10 @@
     
     return writer
 
+def get_unused_port():
+    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    s.bind(('localhost', 0))
+    _, port = s.getsockname()
+    s.close()
+    return port
+
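The new helper relies on the bind-to-port-0 trick: the OS assigns a free ephemeral port. A usage sketch; note the inherent race, since another process may claim the port between close() and the caller's own bind():

    port = get_unused_port()
    print("got free port %d" % port)   # e.g. 49731 (OS-chosen)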
--- a/script/lib/iri_tweet/setup.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/lib/iri_tweet/setup.py	Fri May 10 13:34:40 2013 +0200
@@ -45,7 +45,7 @@
         if line.strip() == '# -eof meta-':
             break
         acc.append(line)
-        for pattern, handler in pats.items():
+        for pattern, handler in pats.iteritems():
             m = pattern.match(line.strip())
             if m:
                 meta.update(handler(m))
--- a/script/lib/tweetstream/CHANGELOG	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,68 +0,0 @@
-0.1
-
- - Initial version
-
-0.2
-
- - Improved error handling
- - Added AuthenticationError and ConnectionError exceptions
- - Added ReconnectingTweetStream class that supports automatically
-   reconnecting if the connection is dropped
-
-0.3
-
- - Fixed bugs in authentication
- - Added TrackStream and FollowStream classes
- - Added list of endpoint names, and made them legal values for the url arg
-
-0.3.1
-
- - Added lots of tests
- - Added proper handling of keepalive newlines
- - Improved handling of closing streams
- - Added missing anyjson dependency to setup
- - Fixed bug where newlines and malformed content were counted as a tweet
-
-0.3.2
-
- - This release was skipped over, due to maintainer brainfart.
-
-0.3.3
-
- - Fixed setup.py so it won't attempt to load modules that aren't installed
-   yet. Fixes installation issue.
-
-0.3.4
-
- - Updated to latest twitter streaming urls
- - Fixed a bug where we tried to call a method on None
-
-0.3.5
-
- - Removed a spurious print statement left over from debugging
- - Introduced common base class for all tweetstream exceptions
- - Make sure we raise a sensible error on 404. Include the url in the description of that error
-
-0.3.6
-
- - Added LocationStream class for filtering on location bounding boxes.
-
-1.0.0
-
- - Changed API to match latest twitter endpoints. This adds SampleStream and
-   FilterStream and deprecates TweetStream, FollowStream, LocationStream,
-   TrackStream and ReconnectingTweetStream.
-
-1.1.0
-
- - Fixed issues #2 and #12, related to low volume streams not yielding tweets
-   until a relatively large buffer was filled. This meant that tweets would
-   arrive in bunches, not immediately.
- - Switched to HTTPS urls for streams. Twitter will switch off HTTP streams
-   on 29 Sept 2011.
- - Added support for Python 3
-
-1.1.1
-
- - Fixed issue #16. Odd case where python_version_tuple was returning ints
-   rather than the strings the docs promise. Make sure we always cast to int.
--- a/script/lib/tweetstream/LICENSE	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,27 +0,0 @@
-Copyright (c) 2009, Rune Halvorsen
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-    * Redistributions of source code must retain the above copyright notice,
-      this list of conditions and the following disclaimer.
-    * Redistributions in binary form must reproduce the above copyright
-      notice, this list of conditions and the following disclaimer in the
-      documentation and/or other materials provided with the distribution.
-
-Neither the name of Rune Halvorsen nor the names of its contributors may be
-used to endorse or promote products derived from this software without
-specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
--- a/script/lib/tweetstream/README	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,110 +0,0 @@
-.. -*- restructuredtext -*-
-
-##########################################
-tweetstream - Simple twitter streaming API
-##########################################
-
-Introduction
-------------
-
-tweetstream provides two classes, SampleStream and FilterStream, that can be
-used to get tweets from Twitter's streaming API. An instance of one of the
-classes can be used as an iterator. In addition to fetching tweets, the 
-object keeps track of the number of tweets collected and the rate at which
-tweets are received.
-
-SampleStream delivers a sample of all tweets. FilterStream delivers
-tweets that match one or more criteria. Note that it's not possible
-to get all tweets without access to the "firehose" stream, which
-is not currently available to the public.
-
-Twitter's documentation about the streaming API can be found here:
-http://dev.twitter.com/pages/streaming_api_methods .
-
-**Note** that the API is blocking. If for some reason data is not immediately
-available, calls will block until enough data is available to yield a tweet.
-
-Examples
---------
-
-Printing incoming tweets:
-
->>> stream = tweetstream.SampleStream("username", "password")
->>> for tweet in stream:
-...     print tweet
-
-
-The stream object can also be used as a context, as in this example that
-prints the author for each tweet as well as the tweet count and rate:
-
->>> with tweetstream.SampleStream("username", "password") as stream:
-...     for tweet in stream:
-...         print "Got tweet from %-16s\t( tweet %d, rate %.1f tweets/sec)" % (
-...                 tweet["user"]["screen_name"], stream.count, stream.rate )
-
-
-Stream objects can raise ConnectionError or AuthenticationError exceptions:
-
->>> try:
-...     with tweetstream.TweetStream("username", "password") as stream:
-...         for tweet in stream:
-...             print "Got tweet from %-16s\t( tweet %d, rate %.1f tweets/sec)" % (
-...                     tweet["user"]["screen_name"], stream.count, stream.rate )
-... except tweetstream.ConnectionError, e:
-...     print "Disconnected from twitter. Reason:", e.reason
-
-To get tweets that match specific criteria, use the FilterStream. FilterStreams
-take three keyword arguments: "locations", "follow" and "track".
-
-Locations are a list of bounding boxes in which geotagged tweets should originate.
-The argument should be an iterable of longitude/latitude pairs.
-
-Track specifies keywords to track. The argument should be an iterable of
-strings.
-
-Follow returns statuses that reference the given users. The argument should be
-an iterable of Twitter user IDs (integer IDs, not screen names).
-
->>> words = ["opera", "firefox", "safari"]
->>> people = [123,124,125]
->>> locations = ["-122.75,36.8", "-121.75,37.8"]
->>> with tweetstream.FilterStream("username", "password", track=words,
-...                               follow=people, locations=locations) as stream:
-...     for tweet in stream:
-...         print "Got interesting tweet:", tweet
-
-
-Deprecated classes
-------------------
-
-tweetstream used to contain the classes TweetStream, FollowStream, TrackStream,
-LocationStream and ReconnectingTweetStream. These were deprecated when twitter
-changed its API end points. The same functionality is now available in
-SampleStream and FilterStream. The deprecated methods will emit a warning when
-used, but will remain functional for a while longer.
-
-
-Changelog
----------
-
-See the CHANGELOG file
-
-Contact
--------
-
-The author is Rune Halvorsen <runefh@gmail.com>. The project resides at
-http://bitbucket.org/runeh/tweetstream . If you find bugs, or have feature
-requests, please report them in the project site issue tracker. Patches are
-also very welcome.
-
-Contributors
-------------
-
-- Rune Halvorsen
-- Christopher Schierkolk
-
-License
--------
-
-This software is licensed under the ``New BSD License``. See the ``LICENSE``
-file in the top distribution directory for the full license text.
--- a/script/lib/tweetstream/conftest.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-# content of conftest.py
-
-import pytest
-def pytest_addoption(parser):
-    parser.addoption("--runslow", action="store_true",
-        help="run slow tests")
-
-def pytest_runtest_setup(item):
-    if 'slow' in item.keywords and not item.config.getvalue("runslow"):
-        pytest.skip("need --runslow option to run")
\ No newline at end of file
--- a/script/lib/tweetstream/servercontext.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,221 +0,0 @@
-import threading
-import contextlib
-import time
-import os
-import socket
-import random
-from functools import partial
-from inspect import isclass
-from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
-from SimpleHTTPServer import SimpleHTTPRequestHandler
-from SocketServer import BaseRequestHandler
-
-
-class ServerError(Exception):
-    pass
-
-
-class ServerContext(object):
-    """Context object with information about a running test server."""
-
-    def __init__(self, address, port):
-        self.address = address or "localhost"
-        self.port = port
-
-    @property
-    def baseurl(self):
-        return "http://%s:%s" % (self.address, self.port)
-
-    def __str__(self):
-        return "<ServerContext %s >" % self.baseurl
-
-    __repr__ = __str__
-
-
-class _SilentSimpleHTTPRequestHandler(SimpleHTTPRequestHandler):
-
-    def __init__(self, *args, **kwargs):
-        self.logging = kwargs.get("logging", False)
-        SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
-
-    def log_message(self, *args, **kwargs):
-        if self.logging:
-            SimpleHTTPRequestHandler.log_message(self, *args, **kwargs)
-
-
-class _TestHandler(BaseHTTPRequestHandler):
-    """RequestHandler class that handles requests that use a custom handler
-    callable."""
-
-    def __init__(self, handler, methods, *args, **kwargs):
-        self._handler = handler
-        self._methods = methods
-        self._response_sent = False
-        self._headers_sent = False
-        self.logging = kwargs.get("logging", False)
-        BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
-
-    def log_message(self, *args, **kwargs):
-        if self.logging:
-            BaseHTTPRequestHandler.log_message(self, *args, **kwargs)
-
-    def send_response(self, *args, **kwargs):
-        self._response_sent = True
-        BaseHTTPRequestHandler.send_response(self, *args, **kwargs)
-
-    def end_headers(self, *args, **kwargs):
-        self._headers_sent = True
-        BaseHTTPRequestHandler.end_headers(self, *args, **kwargs)
-
-    def _do_whatever(self):
-        """Called in place of do_METHOD"""
-        data = self._handler(self)
-
-        if hasattr(data, "next"):
-            # assume it's something supporting generator protocol
-            self._handle_with_iterator(data)
-        else:
-            # Nothing more to do then.
-            pass
-
-
-    def __getattr__(self, name):
-        if name.startswith("do_") and name[3:].lower() in self._methods:
-            return self._do_whatever
-        else:
-            # fixme instance or class?
-            raise AttributeError(name)
-
-    def _handle_with_iterator(self, iterator):
-        self.connection.settimeout(0.1)
-        for data in iterator:
-            if not self.server.server_thread.running:
-                return
-
-            if not self._response_sent:
-                self.send_response(200)
-            if not self._headers_sent:
-                self.end_headers()
-
-            self.wfile.write(data)
-            # flush immediately. We may want to do trickling writes
-            # or something else that requires bypassing normal caching
-            self.wfile.flush()
-
-class _TestServerThread(threading.Thread):
-    """Thread class for a running test server"""
-
-    def __init__(self, handler, methods, cwd, port, address):
-        threading.Thread.__init__(self)
-        self.startup_finished = threading.Event()
-        self._methods = methods
-        self._cwd = cwd
-        self._orig_cwd = None
-        self._handler = self._wrap_handler(handler, methods)
-        self._setup()
-        self.running = True
-        self.serverloc = (address, port)
-        self.error = None
-
-    def _wrap_handler(self, handler, methods):
-        if isclass(handler) and issubclass(handler, BaseRequestHandler):
-            return handler # It's OK. user passed in a proper handler
-        elif callable(handler):
-            return partial(_TestHandler, handler, methods)
-            # it's a callable, so wrap in a req handler
-        else:
-            raise ServerError("handler must be callable or RequestHandler")
-
-    def _setup(self):
-        if self._cwd != "./":
-            self._orig_cwd = os.getcwd()
-            os.chdir(self._cwd)
-
-    def _init_server(self):
-        """Hooks up the server socket"""
-        try:
-            if self.serverloc[1] == "random":
-                retries = 10 # try getting an available port max this many times
-                while True:
-                    try:
-                        self.serverloc = (self.serverloc[0],
-                                          random.randint(1025, 49151))
-                        self._server = HTTPServer(self.serverloc, self._handler)
-                    except socket.error:
-                        retries -= 1
-                        if not retries: # not able to get a port.
-                            raise
-                    else:
-                        break
-            else: # use specific port. this might throw, that's expected
-                self._server = HTTPServer(self.serverloc, self._handler)
-        except socket.error, e:
-            self.running = False
-            self.error = e
-            # set this here, since we'll never enter the serve loop where
-            # it is usually set:
-            self.startup_finished.set()
-            return
-
-        self._server.allow_reuse_address = True # lots of tests, same port
-        self._server.timeout = 0.1
-        self._server.server_thread = self
-
-
-    def run(self):
-        self._init_server()
-
-        while self.running:
-            self._server.handle_request() # blocks for self.timeout secs
-            # First time this falls through, signal the parent thread that
-            # the server is ready for incoming connections
-            if not self.startup_finished.is_set():
-                self.startup_finished.set()
-
-        self._cleanup()
-
-    def stop(self):
-        """Stop the server and attempt to make the thread terminate.
-        This happens asynchronously, but the calling code can periodically
-        check the running flag on the thread object.
-        """
-        # actual stopping happens in the run method
-        self.running = False
-
-    def _cleanup(self):
-        """Do some rudimentary cleanup."""
-        if self._orig_cwd:
-            os.chdir(self._orig_cwd)
-
-
-@contextlib.contextmanager
-def test_server(handler=_SilentSimpleHTTPRequestHandler, port=8514,
-                address="", methods=("get", "head"), cwd="./"):
-    """Context that makes available a web server in a separate thread"""
-    thread = _TestServerThread(handler=handler, methods=methods, cwd=cwd,
-                               port=port, address=address)
-    thread.start()
-
-    # fixme: should this be daemonized? If it isn't it will block the entire
-    # app, but that should never happen anyway..
-    thread.startup_finished.wait()
-
-    if thread.error: # startup failed! Bail, throw whatever the server did
-        raise thread.error
-
-    exc = None
-    try:
-        yield ServerContext(*thread.serverloc)
-    except Exception, exc:
-        pass
-    thread.stop()
-    thread.join(5) # giving it a lot of leeway. should never happen
-
-    if exc:
-        raise exc
-
-    # fixme: this takes second priority after the internal exception but it
-    # would still be nice to signal back to calling code.
-
-    if thread.isAlive():
-        raise Warning("Test server could not be stopped")
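For reference, the removed context manager was exercised as in the test suite deleted below; the handler may be a plain callable or a generator yielding response chunks (the hello handler is an assumed placeholder):

    def hello(request):
        yield "hello\r"

    with test_server(handler=hello, methods=("get", "post"),
                     port="random") as server:
        print(server.baseurl)   # e.g. http://localhost:43121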
--- a/script/lib/tweetstream/setup.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,82 +0,0 @@
-#@PydevCodeAnalysisIgnore
-import sys
-import os
-
-extra = {}
-if sys.version_info >= (3, 0):
-    extra.update(use_2to3=True)
-
-
-try:
-    from setuptools import setup, find_packages
-except ImportError:
-    from distutils.core import setup, find_packages
-
-
-# -*- Distribution Meta -*-
-import re
-re_meta = re.compile(r'__(\w+?)__\s*=\s*(.*)')
-re_vers = re.compile(r'VERSION\s*=\s*\((.*?)\)')
-re_doc = re.compile(r'^"""(.+?)"""', re.M|re.S)
-rq = lambda s: s.strip("\"'")
-
-
-def add_default(m):
-    attr_name, attr_value = m.groups()
-    return ((attr_name, rq(attr_value)), )
-
-
-def add_version(m):
-    v = list(map(rq, m.groups()[0].split(", ")))
-    return (("VERSION", ".".join(v[0:3]) + "".join(v[3:])), )
-
-
-def add_doc(m):
-    return (("doc", m.groups()[0].replace("\n", " ")), )
-
-pats = {re_meta: add_default,
-        re_vers: add_version}
-here = os.path.abspath(os.path.dirname(__file__))
-meta_fh = open(os.path.join(here, "tweetstream/__init__.py"))
-try:
-    meta = {}
-    acc = []
-    for line in meta_fh:
-        if line.strip() == '# -eof meta-':
-            break
-        acc.append(line)
-        for pattern, handler in pats.items():
-            m = pattern.match(line.strip())
-            if m:
-                meta.update(handler(m))
-    m = re_doc.match("".join(acc).strip())
-    if m:
-        meta.update(add_doc(m))
-finally:
-    meta_fh.close()
-
-
-setup(name='tweetstream',
-    version=meta["VERSION"],
-    description=meta["doc"],
-    long_description=open("README").read(),
-    classifiers=[
-        'License :: OSI Approved :: BSD License',
-        'Intended Audience :: Developers',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.1',
-    ],
-    keywords='twitter',
-    author=meta["author"],
-    author_email=meta["contact"],
-    url=meta["homepage"],
-    license='BSD',
-    packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
-    include_package_data=True,
-    zip_safe=False,
-    platforms=["any"],
-    install_requires=['anyjson'],
-    **extra
-)
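An illustration of the metadata scan in the removed setup.py: re_meta picks dunder assignments out of tweetstream/__init__.py and rq strips the surrounding quotes (the sample line comes from the removed __init__.py):

    import re

    re_meta = re.compile(r'__(\w+?)__\s*=\s*(.*)')
    rq = lambda s: s.strip("\"'")

    m = re_meta.match('__author__ = "Rune Halvorsen"')
    name, value = m.groups()
    print name, rq(value)   # -> author Rune Halvorsen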
--- a/script/lib/tweetstream/tests/test_tweetstream.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,188 +0,0 @@
-import contextlib
-import threading
-import time
-
-from tweetstream import TweetStream, FollowStream, TrackStream, LocationStream
-from tweetstream import ConnectionError, AuthenticationError, SampleStream, FilterStream
-from tweepy.auth import BasicAuthHandler   
-
-import pytest
-from pytest import raises
-slow = pytest.mark.slow
-
-from servercontext import test_server
-
-single_tweet = r"""{"in_reply_to_status_id":null,"in_reply_to_user_id":null,"favorited":false,"created_at":"Tue Jun 16 10:40:14 +0000 2009","in_reply_to_screen_name":null,"text":"record industry just keeps on amazing me: http:\/\/is.gd\/13lFo - $150k per song you've SHARED, not that somebody has actually DOWNLOADED.","user":{"notifications":null,"profile_background_tile":false,"followers_count":206,"time_zone":"Copenhagen","utc_offset":3600,"friends_count":191,"profile_background_color":"ffffff","profile_image_url":"http:\/\/s3.amazonaws.com\/twitter_production\/profile_images\/250715794\/profile_normal.png","description":"Digital product developer, currently at Opera Software. My tweets are my opinions, not those of my employer.","verified_profile":false,"protected":false,"favourites_count":0,"profile_text_color":"3C3940","screen_name":"eiriksnilsen","name":"Eirik Stridsklev N.","following":null,"created_at":"Tue May 06 12:24:12 +0000 2008","profile_background_image_url":"http:\/\/s3.amazonaws.com\/twitter_production\/profile_background_images\/10531192\/160x600opera15.gif","profile_link_color":"0099B9","profile_sidebar_fill_color":"95E8EC","url":"http:\/\/www.stridsklev-nilsen.no\/eirik","id":14672543,"statuses_count":506,"profile_sidebar_border_color":"5ED4DC","location":"Oslo, Norway"},"id":2190767504,"truncated":false,"source":"<a href=\"http:\/\/widgets.opera.com\/widget\/7206\">Twitter Opera widget<\/a>"}""" + "\r"
-
-
-def parameterized(funcarglist):
-    def wrapper(function):
-        function.funcarglist = funcarglist
-        return function
-    return wrapper
-
-def pytest_generate_tests(metafunc):
-    for funcargs in getattr(metafunc.function, 'funcarglist', ()):
-        metafunc.addcall(funcargs=funcargs)
-
-
-streamtypes = [
-    dict(cls=TweetStream, args=[], kwargs=dict()),
-    dict(cls=SampleStream, args=[], kwargs=dict()),
-    dict(cls=FilterStream, args=[], kwargs=dict(track=("test",))),
-    dict(cls=FollowStream, args=[[1, 2, 3]], kwargs=dict()),
-    dict(cls=TrackStream, args=["opera"], kwargs=dict()),
-    dict(cls=LocationStream, args=["123,4321"], kwargs=dict())
-]
-
-
-@parameterized(streamtypes)
-def test_bad_auth(cls, args, kwargs):
-    """Test that the proper exception is raised when the user could not be
-    authenticated"""
-    def auth_denied(request):
-        request.send_error(401)
-
-    with raises(AuthenticationError):
-        with test_server(handler=auth_denied, methods=("post", "get"), port="random") as server:
-            auth = BasicAuthHandler("user", "passwd")
-            stream = cls(auth, *args, url=server.baseurl)
-            for e in stream: pass
-
-
-@parameterized(streamtypes)
-def test_404_url(cls, args, kwargs):
-    """Test that the proper exception is raised when the stream URL can't be
-    found"""
-    def not_found(request):
-        request.send_error(404)
-
-    with raises(ConnectionError):
-        with test_server(handler=not_found, methods=("post", "get"), port="random") as server:
-            auth = BasicAuthHandler("user", "passwd")
-            stream = cls(auth, *args, url=server.baseurl)
-            for e in stream: pass
-
-
-@parameterized(streamtypes)
-def test_bad_content(cls, args, kwargs):
-    """Test error handling if we are given invalid data"""
-    def bad_content(request):
-        for n in xrange(10):
-            # what json we pass doesn't matter. It's not verifying the
-            # structure, only checking that it's parsable
-            yield "[1,2,3]\r"
-        yield "[1,2, I need no stinking close brace\r"
-        yield "[1,2,3]\r"
-
-
-    with raises(ConnectionError):
-        with test_server(handler=bad_content, methods=("post", "get"), port="random") as server:
-            auth = BasicAuthHandler("user", "passwd")
-            stream = cls(auth, *args, url=server.baseurl)
-            for tweet in stream:
-                pass
-
-
-@parameterized(streamtypes)
-def test_closed_connection(cls, args, kwargs):
-    """Test error handling if server unexpectedly closes connection"""
-    cnt = 1000
-    def bad_content(request):
-        for n in xrange(cnt):
-            # what json we pass doesn't matter. It's not verifying the
-            # structure, only checking that it's parsable
-            yield "[1,2,3]\r"
-
-    with raises(ConnectionError):
-        with test_server(handler=bad_content, methods=("post", "get"), port="random") as server:
-            auth = BasicAuthHandler("foo", "bar")
-            stream = cls(auth, *args, url=server.baseurl)
-            for tweet in stream:
-                pass
-
-
-@parameterized(streamtypes)
-def test_bad_host(cls, args, kwargs):
-    """Test behaviour if we can't connect to the host"""
-    with raises(ConnectionError):
-        stream = cls("username", "passwd", *args, url="http://wedfwecfghhreewerewads.foo")
-        stream.next()
-
-
-@parameterized(streamtypes)
-def smoke_test_receive_tweets(cls, args, kwargs):
-    """Receive 100k tweets and disconnect (slow)"""
-    total = 100000
-
-    def tweetsource(request):
-        while True:
-            yield single_tweet + "\n"
-
-    with test_server(handler=tweetsource, methods=("post", "get"), port="random") as server:
-        auth = BasicAuthHandler("foo", "bar")
-        stream = cls(auth, *args, url=server.baseurl)
-        for tweet in stream:
-            if stream.count == total:
-                break
-
-
-@parameterized(streamtypes)
-def test_keepalive(cls, args, kwargs):
-    """Make sure we behave sanely when there are keepalive newlines in the
-    data received from twitter"""
-    def tweetsource(request):
-        yield single_tweet+"\n"
-        yield "\n"
-        yield "\n"
-        yield single_tweet+"\n"
-        yield "\n"
-        yield "\n"
-        yield "\n"
-        yield "\n"
-        yield "\n"
-        yield "\n"
-        yield "\n"
-        yield single_tweet+"\n"
-        yield "\n"
-
-
-    with test_server(handler=tweetsource, methods=("post", "get"), port="random") as server:
-        auth = BasicAuthHandler("foo", "bar")
-        stream = cls(auth, *args, url=server.baseurl)
-        try:
-            for tweet in stream:
-                pass
-        except ConnectionError:
-            assert stream.count == 3, "Got %s, wanted 3" % stream.count
-        else:
-            assert False, "Didn't handle keepalive"
-
-
-@slow
-@parameterized(streamtypes)
-def test_buffering(cls, args, kwargs):
-    """Test if buffering stops data from being returned immediately.
-    If there is some buffering in play, that might mean data is only returned
-    from the generator when the buffer is full. If buffer is bigger than a
-    tweet, this will happen. Default buffer size in the part of socket lib
-    that enables readline is 8k. Max tweet length is around 3k."""
-
-    def tweetsource(request):
-        yield single_tweet+"\n"
-        time.sleep(2)
-        # need to yield a bunch here so we're sure we'll return from the
-        # blocking call in case the buffering bug is present.
-        for n in xrange(100):
-            yield single_tweet+"\n"
-
-
-    with test_server(handler=tweetsource, methods=("post", "get"), port="random") as server:
-        auth = BasicAuthHandler("foo", "bar")
-        stream = cls(auth, *args, url=server.baseurl)
-        start = time.time()
-        stream.next()
-        first = time.time()
-        diff = first - start
-        assert diff < 1, "Getting first tweet took more than a second!"
-
--- a/script/lib/tweetstream/tox.ini	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,16 +0,0 @@
-[tox]
-envlist = py25,py26,py27,py30,py31,py32
-
-[testenv]
-deps=pytest
-sitepackages=False
-commands=py.test --runslow
-
-[testenv:py30]
-changedir = .tox
-
-[testenv:py31]
-changedir = .tox
-
-[testenv:py32]
-changedir = .tox
\ No newline at end of file
--- a/script/lib/tweetstream/tweetstream/__init__.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-"""Simple access to Twitter's streaming API"""
-
-VERSION = (1, 1, 1, 'iri')
-__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
-__author__ = "Rune Halvorsen"
-__contact__ = "runefh@gmail.com"
-__homepage__ = "http://bitbucket.org/runeh/tweetstream/"
-__docformat__ = "restructuredtext"
-
-# -eof meta-
-
-
-"""
- .. data:: USER_AGENT
-
-     The default user agent string for stream objects
-"""
-
-USER_AGENT = "TweetStream %s" % __version__
-
-
-class TweetStreamError(Exception):
-    """Base class for all tweetstream errors"""
-    pass
-
-
-class AuthenticationError(TweetStreamError):
-    """Exception raised if the username/password is not accepted"""
-    pass
-
-
-class ConnectionError(TweetStreamError):
-    """Raised when there are network problems. This means when there are
-    dns errors, network errors, twitter issues"""
-
-    def __init__(self, reason, details=None):
-        self.reason = reason
-        self.details = details
-
-    def __str__(self):
-        return '<ConnectionError %s>' % self.reason
-
-
-from .streamclasses import SampleStream, FilterStream
-from .deprecated import FollowStream, TrackStream, LocationStream, TweetStream, ReconnectingTweetStream
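Worked out, the version expression in the removed __init__.py joins the numeric parts with dots and appends any tag verbatim:

    VERSION = (1, 1, 1, 'iri')
    __version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
    # "1.1.1" + "iri" -> "1.1.1iri"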
--- a/script/lib/tweetstream/tweetstream/deprecated.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,82 +0,0 @@
-from .streamclasses import FilterStream, SampleStream, ConnectionError
-import time
-
-class DeprecatedStream(FilterStream):
-    def __init__(self, *args, **kwargs):
-        import warnings
-        warnings.warn("%s is deprecated. Use FilterStream instead" % self.__class__.__name__, DeprecationWarning)
-        super(DeprecatedStream, self).__init__(*args, **kwargs)
-
-
-class FollowStream(DeprecatedStream):
-    def __init__(self, auth, follow, catchup=None, url=None):
-        super(FollowStream, self).__init__(auth, follow=follow, catchup=catchup, url=url)
-
-
-class TrackStream(DeprecatedStream):
-    def __init__(self, auth, track, catchup=None, url=None, slow=False):
-        super(TrackStream, self).__init__(auth, track=track, catchup=catchup, url=url)
-
-
-class LocationStream(DeprecatedStream):
-    def __init__(self, auth, locations, catchup=None, url=None, slow=False):
-        super(LocationStream, self).__init__(auth, locations=locations, catchup=catchup, url=url)
-
-
-class TweetStream(SampleStream):
-    def __init__(self, *args, **kwargs):
-        import warnings
-        warnings.warn("%s is deprecated. Use SampleStream instead" % self.__class__.__name__, DeprecationWarning)
-        SampleStream.__init__(self, *args, **kwargs)
-
-
-class ReconnectingTweetStream(TweetStream):
-    """TweetStream class that automatically tries to reconnect if the
-    connecting goes down. Reconnecting, and waiting for reconnecting, is
-    blocking.
-
-    :param username: See :TweetStream:
-
-    :param password: See :TweetStream:
-
-    :keyword url: See :TweetStream:
-
-    :keyword reconnects: Number of reconnects before a ConnectionError is
-        raised. Default is 3
-
-    :error_cb: Optional callable that will be called just before trying to
-        reconnect. The callback will be called with a single argument, the
-        exception that caused the reconnect attempt. Default is None
-
-    :retry_wait: Time to wait before reconnecting in seconds. Default is 5
-
-    """
-
-    def __init__(self, auth, url="sample",
-                 reconnects=3, error_cb=None, retry_wait=5):
-        self.max_reconnects = reconnects
-        self.retry_wait = retry_wait
-        self._reconnects = 0
-        self._error_cb = error_cb
-        TweetStream.__init__(self, auth, url=url)
-
-    def next(self):
-        while True:
-            try:
-                return TweetStream.next(self)
-            except ConnectionError, e:
-                self._reconnects += 1
-                if self._reconnects > self.max_reconnects:
-                    raise ConnectionError("Too many retries")
-
-                # Note: error_cb is not called on the last error since we
-                # raise a ConnectionError instead
-                if  callable(self._error_cb):
-                    self._error_cb(e)
-
-                time.sleep(self.retry_wait)
-        # Don't listen to auth error, since we can't reasonably reconnect
-        # when we get one.
-
-
-
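A sketch of the reconnect behaviour documented above; auth and handle() are assumed placeholders, and the class was already deprecated when removed:

    def on_error(exc):
        print "reconnecting after:", exc   # called before each retry

    stream = ReconnectingTweetStream(auth, reconnects=3,
                                     error_cb=on_error, retry_wait=5)
    for tweet in stream:   # raises ConnectionError("Too many retries")
        handle(tweet)      # once max_reconnects is exceeded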
--- a/script/lib/tweetstream/tweetstream/streamclasses.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,256 +0,0 @@
-import time
-import urllib
-import urllib2
-import socket
-from platform import python_version_tuple
-import anyjson
-
-from . import AuthenticationError, ConnectionError, USER_AGENT
-
-class BaseStream(object):
-    """A network connection to Twitters streaming API
-
-    :param auth: tweepy auth object.
-    :keyword catchup: Number of tweets from the past to get before switching to
-      live stream.
-    :keyword raw: If True, return each tweet's raw data direct from the socket,
-      without UTF8 decoding or parsing, rather than a parsed object. The
-      default is False.
-    :keyword timeout: If non-None, set a timeout in seconds on the receiving
-      socket. Certain types of network problems (e.g., disconnecting a VPN)
-      can cause the connection to hang, leading to indefinite blocking that
-      requires kill -9 to resolve. Setting a timeout leads to an orderly
-      shutdown in these cases. The default is None (i.e., no timeout).
-    :keyword url: Endpoint URL for the object. Note: you should not
-      need to edit this. It's present to make testing easier.
-
-    .. attribute:: connected
-
-        True if the object is currently connected to the stream.
-
-    .. attribute:: url
-
-        The URL to which the object is connected
-
-    .. attribute:: starttime
-
-        The timestamp, in seconds since the epoch, the object connected to the
-        streaming api.
-
-    .. attribute:: count
-
-        The number of tweets that have been returned by the object.
-
-    .. attribute:: rate
-
-        The rate at which tweets have been returned from the object as a
-        float. see also :attr: `rate_period`.
-
-    .. attribute:: rate_period
-
-        The amount of time to sample tweets to calculate tweet rate. By
-        default 10 seconds. Changes to this attribute will not be reflected
-        until the next time the rate is calculated. The rate of tweets varies
-        with time of day etc. so it's useful to set this to something
-        sensible.
-
-    .. attribute:: user_agent
-
-        User agent string that will be included in the request. NOTE: This can
-        not be changed after the connection has been made. This property must
-        thus be set before accessing the iterator. The default is set in
-        :attr: `USER_AGENT`.
-    """
-
-    def __init__(self, auth,
-                 catchup=None, raw=False, timeout=None, url=None):
-        self._conn = None
-        self._rate_ts = None
-        self._rate_cnt = 0
-        self._auth = auth
-        self._catchup_count = catchup
-        self._raw_mode = raw
-        self._timeout = timeout
-        self._iter = self.__iter__()
-
-        self.rate_period = 10  # in seconds
-        self.connected = False
-        self.starttime = None
-        self.count = 0
-        self.rate = 0
-        self.user_agent = USER_AGENT
-        if url: self.url = url
-        
-        self.muststop = False
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *params):
-        self.close()
-        return False
-
-    def _init_conn(self):
-        """Open the connection to the twitter server"""
-        headers = {'User-Agent': self.user_agent}
-
-        postdata = self._get_post_data() or {}
-        if self._catchup_count:
-            postdata["count"] = self._catchup_count
-
-        poststring = urllib.urlencode(postdata) if postdata else None
-        
-        if self._auth:
-            self._auth.apply_auth(self.url, "POST", headers, postdata)
-                
-        req = urllib2.Request(self.url, poststring, headers)
-
-        try:
-            self._conn = urllib2.urlopen(req, timeout=self._timeout)
-
-        except urllib2.HTTPError, exception:
-            if exception.code == 401:
-                raise AuthenticationError("Access denied")
-            elif exception.code == 404:
-                raise ConnectionError("URL not found: %s" % self.url)
-            else:  # re raise. No idea what would cause this, so want to know
-                raise
-        except urllib2.URLError, exception:
-            raise ConnectionError(exception.reason)
-
-        # This is horrible. This line grabs the raw socket (actually an ssl
-        # wrapped socket) from the guts of urllib2/httplib. We want the raw
-        # socket so we can bypass the buffering that those libs provide.
-        # The buffering is reasonable when dealing with connections that
-        # try to finish as soon as possible. With twitters' never ending
-        # connections, it causes a bug where we would not deliver tweets
-        # until the buffer was full. That's problematic for very low volume
-        # filterstreams, since you might not see a tweet for minutes or hours
-        # after they occurred while the buffer fills.
-        #
-        # Oh, and the innards of the http libs are different in py2 and
-        # py3, so we need to deal with that. py3 libs do more of what I
-        # want by default, but I won't do more special casing for it than
-        # necessary.
-
-        major, _, _ = python_version_tuple()
-        # The cast is needed because apparently some versions return strings
-        # and some return ints.
-        # On my ubuntu with stock 2.6 I get strings, which match the docs.
-        # Someone reported the issue on 2.6.1 on macos, but that was
-        # manually built, not the bundled one. Anyway, cast for safety.
-        major = int(major)
-        if major == 2:
-            self._socket = self._conn.fp._sock.fp._sock
-        else:
-            self._socket = self._conn.fp.raw
-            # our code that reads from the socket expects a method called recv.
-            # py3 socket.SocketIO uses the name read, so alias it.
-            self._socket.recv = self._socket.read
-
-        self.connected = True
-        if not self.starttime:
-            self.starttime = time.time()
-        if not self._rate_ts:
-            self._rate_ts = time.time()
-
-    def _get_post_data(self):
-        """Subclasses that need to add post data to the request can override
-        this method and return post data. The data should be in the format
-        returned by urllib.urlencode."""
-        return None
-
-    def __muststop(self):
-        if callable(self.muststop):
-            return self.muststop()
-        else:
-            return self.muststop
-    
-    def _update_rate(self):
-        rate_time = time.time() - self._rate_ts
-        if not self._rate_ts or rate_time > self.rate_period:
-            self.rate = self._rate_cnt / rate_time
-            self._rate_cnt = 0
-            self._rate_ts = time.time()
-
-    def __iter__(self):
-        buf = b""
-        while True:
-            try:
-                if self.__muststop():
-                    raise StopIteration()
-                
-                if not self.connected:
-                    self._init_conn()
-
-                buf += self._socket.recv(8192)
-                if buf == b"":  # something is wrong
-                    self.close()
-                    raise ConnectionError("Got entry of length 0. Disconnected")
-                elif buf.isspace():
-                    buf = b""
-                elif b"\r" not in buf: # not enough data yet. Loop around
-                    continue
-
-                lines = buf.split(b"\r")
-                buf = lines[-1]
-                lines = lines[:-1]
-
-                for line in lines:
-                    if (self._raw_mode):
-                        tweet = line
-                    else:
-                        line = line.decode("utf8")
-                        try:
-                            tweet = anyjson.deserialize(line)
-                        except ValueError, e:
-                            self.close()
-                            raise ConnectionError("Got invalid data from twitter", details=line)
-
-                    if 'text' in tweet:
-                        self.count += 1
-                        self._rate_cnt += 1
-                    self._update_rate()
-                    yield tweet
-
-
-            except socket.error, e:
-                self.close()
-                raise ConnectionError("Server disconnected")
-
-
-    def next(self):
-        """Return the next available tweet. This call is blocking!"""
-        return self._iter.next()
-
-
-    def close(self):
-        """
-        Close the connection to the streaming server.
-        """
-        self.connected = False
-        if self._conn:
-            self._conn.close()
-
-
-class SampleStream(BaseStream):
-    url = "https://stream.twitter.com/1/statuses/sample.json"
-
-
-class FilterStream(BaseStream):
-    url = "https://stream.twitter.com/1/statuses/filter.json"
-
-    def __init__(self, auth, follow=None, locations=None,
-                 track=None, catchup=None, url=None, raw=False, timeout=None):
-        self._follow = follow
-        self._locations = locations
-        self._track = track
-        # remove follow, locations, track
-        BaseStream.__init__(self, auth, url=url, raw=raw, catchup=catchup, timeout=timeout)
-
-    def _get_post_data(self):
-        postdata = {}
-        if self._follow: postdata["follow"] = ",".join([str(e) for e in self._follow])
-        if self._locations: postdata["locations"] = ",".join(self._locations)
-        if self._track: postdata["track"] = ",".join(self._track)
-        return postdata
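Worked example of _get_post_data above: each iterable is joined into the comma-separated form the filter endpoint expects (auth is an assumed tweepy auth object):

    stream = FilterStream(auth, follow=[123, 124],
                          track=["opera", "firefox"],
                          locations=["-122.75,36.8", "-121.75,37.8"])
    # stream._get_post_data() returns:
    # {"follow": "123,124",
    #  "track": "opera,firefox",
    #  "locations": "-122.75,36.8,-121.75,37.8"}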
--- a/script/rest/enmi_profile.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,134 +0,0 @@
-import twython
-from sqlite3 import *
-import datetime, time
-import email.utils
-from optparse import OptionParser
-import os.path
-import os
-import sys
-import simplejson
-
-
-#options filename rpp page total_pages start_date end_date
-
-
- 
-def adapt_datetime(ts):
-    return time.mktime(ts.timetuple())
-    
-def adapt_geo(geo):
-	return simplejson.dumps(geo)
-	
-def convert_geo(s):
-	return simplejson.loads(s)
-
-
-register_adapter(datetime.datetime, adapt_datetime)
-register_converter("geo", convert_geo)
-
-columns_tweet = [u'favorited', u'truncated', u'text', u'created_at', u'source', u'in_reply_to_status_id', u'in_reply_to_screen_name', u'in_reply_to_user_id', u'geo', u'id', u'user']
-columns_user = [u'id', u'verified', u'profile_sidebar_fill_color', u'profile_text_color', u'followers_count', u'protected', u'location', u'profile_background_color', u'utc_offset', u'statuses_count', u'description', u'friends_count', u'profile_link_color', u'profile_image_url', u'notifications', u'geo_enabled', u'profile_background_image_url', u'screen_name', u'profile_background_tile', u'favourites_count', u'name', u'url', u'created_at', u'time_zone', u'profile_sidebar_border_color', u'following']
-
-def processDate(entry):
-    ts = email.utils.parsedate(entry["created_at"])
-    entry["created_at_ts"] = datetime.datetime.fromtimestamp(time.mktime(ts))
-
-def processPage(page, cursor, debug):
-    for entry in page:
-        if debug:
-            print "ENTRY : " + repr(entry)
-        curs.execute("select id from tweet_tweet where id = ?", (entry["id"],))
-        res = curs.fetchone()
-        if res:
-            continue
-
-        entry_user = entry["user"]
-        processDate(entry_user)
-        cursor.execute("insert into tweet_user ("+",".join(entry_user.keys())+") values (:"+",:".join(entry_user.keys())+");", entry_user);
-        new_id = cursor.lastrowid
-        processDate(entry)
-        entry["user"] = new_id
-        if entry["geo"]:
-            entry["geo"] = adapt_geo(entry["geo"])
-        new_id = cursor.execute("insert into tweet_tweet ("+",".join(entry.keys())+") values (:"+",:".join(entry.keys())+");", entry);
-
-
-if __name__ == "__main__" :
-
-    parser = OptionParser()
-    parser.add_option("-f", "--file", dest="filename",  
-                      help="write tweet to FILE", metavar="FILE", default="enmi2010_twitter_rest.db")
-    parser.add_option("-r", "--rpp", dest="rpp",
-                      help="Results per page", metavar="RESULT_PER_PAGE", default=200, type='int')
-    parser.add_option("-p", "--page", dest="page",
-                      help="page result", metavar="PAGE", default=1, type='int')
-    parser.add_option("-t", "--total-page", dest="total_page",
-                      help="Total page number", metavar="TOTAL_PAGE", default=16, type='int')
-    parser.add_option("-s", "--screenname", dest="screen_name",
-                      help="Twitter screen name", metavar="SCREEN_NAME")
-    parser.add_option("-u", "--user", dest="username",
-                      help="Twitter user", metavar="USER", default=None)
-    parser.add_option("-w", "--password", dest="password",
-                      help="Twitter password", metavar="PASSWORD", default=None)
-    parser.add_option("-n", "--new", dest="new", action="store_true",
-                      help="new database", default=False)
-    parser.add_option("-d", "--debug", dest="debug", action="store_true",
-                      help="debug", default=False)
-    
-
-
-    (options, args) = parser.parse_args()
-    
-    if options.debug:
-    	print "OPTIONS : "
-    	print repr(options)
-
-    if options.screen_name is None:
-        print "No Screen name. Exiting"
-        sys.exit()
-    
-    if options.new and os.path.exists(options.filename):
-        os.remove(options.filename)
-    
-    conn = connect(options.filename)
-    conn.row_factory = Row
-    curs = conn.cursor()
-
-    curs.execute("create table if not exists tweet_user ("+ ",".join(columns_user) +", created_at_ts integer);")
-
-    curs.execute("create table if not exists tweet_tweet ("+ ",".join(columns_tweet) +", created_at_ts integer);")
-    curs.execute("create index if not exists id_index on tweet_tweet (id asc);");
-    
-    curs.execute("select count(*) from tweet_tweet;")
-    res = curs.fetchone()
-    
-    old_total = res[0]
-
-    twitter = twython.setup(username=options.username, password=options.password, headers="IRI enmi (python urllib)")
-    twitter = twython.Twython(twitter_token = "54ThDZhpEjokcMgHJOMnQA", twitter_secret = "wUoL9UL2T87tfc97R0Dff2EaqRzpJ5XGdmaN2XK3udA")
-
-    search_results = None
-    page = options.page-1
-
-    while (page < options.total_page and ( search_results is None  or len(search_results) > 0)):
-        page += 1
-        try:
-            search_results = twitter.getUserTimeline(screen_name=options.screen_name, count=options.rpp, page=page)
-        except twython.TwythonError, (e):
-            print "NAME : "+ options.screen_name + " ERROR : " + repr(e.msg)
-            break
-        print "NAME : "+ options.screen_name +" PAGE : " + repr(page) + " tweet: " + repr(len(search_results)) + " (total page : " + unicode(options.total_page) + " : rpp : "+unicode(options.rpp)+")"
-        processPage(search_results, curs, options.debug)
-
-    conn.commit() 
-
-    curs.execute("select count(*) from tweet_tweet;")
-    res = curs.fetchone()
-
-    total = res[0]
-
-    print "Tweet for " + options.screen_name + " : " + unicode(total - old_total) +", Tweet total : " + repr(total)
-
-    conn.close()
-
-
--- a/script/rest/export_twitter.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/rest/export_twitter.py	Fri May 10 13:34:40 2013 +0200
@@ -1,16 +1,15 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-from sqlite3 import *
+from sqlite3 import register_adapter, register_converter, connect, Row
 import datetime, time
 import email.utils
 from optparse import OptionParser
 import os.path
-import os
-import sys
 from lxml import etree
 import uuid
 import re
+import simplejson
 
 def parse_date(date_str):
     ts = email.utils.parsedate_tz(date_str)
@@ -20,10 +19,10 @@
     return time.mktime(ts.timetuple())
     
 def adapt_geo(geo):
-	return simplejson.dumps(geo)
-	
+    return simplejson.dumps(geo)
+
 def convert_geo(s):
-	return simplejson.loads(s)
+    return simplejson.loads(s)
 
 
 register_adapter(datetime.datetime, adapt_datetime)
@@ -73,7 +72,7 @@
     ts = int(parse_date(options.start_date))
 
     if options.end_date:
-    	te = int(parse_date(options.end_date))
+        te = int(parse_date(options.end_date))
     else:
         te = ts + options.duration
     
--- a/script/rest/getscreennames.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/rest/getscreennames.py	Fri May 10 13:34:40 2013 +0200
@@ -1,11 +1,5 @@
-from sqlite3 import *
-import datetime, time
-import email.utils
 from optparse import OptionParser
-import os.path
-import os
-import sys
-import simplejson
+from sqlite3 import connect, Row
 import re
 
 if __name__ == "__main__" :
--- a/script/rest/search_twitter.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/rest/search_twitter.py	Fri May 10 13:34:40 2013 +0200
@@ -1,10 +1,8 @@
-from iri_tweet import models, utils
-from sqlalchemy.orm import sessionmaker
+from iri_tweet import models, processor
+from optparse import OptionParser
 import anyjson
-import sqlite3
+import re
 import twitter
-import re
-from optparse import OptionParser
 
 
 def get_option():
@@ -59,7 +57,7 @@
                 print tweet
                 tweet_str = anyjson.serialize(tweet)
                 #invalidate user id
-                processor = utils.TwitterProcessor(tweet, tweet_str, None, session, None, options.token_filename)
-                processor.process()
+                tweet_processor = processor.TwitterProcessorStatus(json_dict=tweet, json_txt=tweet_str, source_id=None, session=session, consumer_token=None, access_token=None, token_filename=options.token_filename, user_query_twitter=False, logger=None)
+                tweet_processor.process()
                 session.flush()
                 session.commit()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/script/stream/recorder_stream.py	Fri May 10 13:34:40 2013 +0200
@@ -0,0 +1,612 @@
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from iri_tweet import models, utils
+from iri_tweet.models import TweetSource, TweetLog, ProcessEvent
+from iri_tweet.processor import get_processor
+from multiprocessing import Queue as mQueue, Process, Event
+from sqlalchemy.exc import OperationalError
+from sqlalchemy.orm import scoped_session
+import Queue
+import StringIO
+import anyjson
+import argparse
+import datetime
+import inspect
+import iri_tweet.stream
+import logging
+import os
+import re
+import requests_oauthlib
+import shutil
+import signal
+import socket
+import sqlalchemy.schema
+import sys
+import thread
+import threading
+import time
+import traceback
+import urllib2
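+# Unbuffered sockets: the long-lived streaming connection must hand data over as soon as it arrives.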
+socket._fileobject.default_bufsize = 0
+
+
+
+# columns_tweet = [u'favorited', u'truncated', u'text', u'created_at', u'source', u'in_reply_to_status_id', u'in_reply_to_screen_name', u'in_reply_to_user_id', u'geo', u'id', u'user']
+columns_tweet = [u'user', u'favorited', u'contributors', u'truncated', u'text', u'created_at', u'retweeted', u'in_reply_to_status_id_str', u'coordinates', u'in_reply_to_user_id_str', u'entities', u'in_reply_to_status_id', u'place', u'in_reply_to_user_id', u'id', u'in_reply_to_screen_name', u'retweet_count', u'geo', u'id_str', u'source']
+# columns_user = [u'id', u'verified', u'profile_sidebar_fill_color', u'profile_text_color', u'followers_count', u'protected', u'location', u'profile_background_color', u'utc_offset', u'statuses_count', u'description', u'friends_count', u'profile_link_color', u'profile_image_url', u'notifications', u'geo_enabled', u'profile_background_image_url', u'screen_name', u'profile_background_tile', u'favourites_count', u'name', u'url', u'created_at', u'time_zone', u'profile_sidebar_border_color', u'following']
+columns_user = [u'follow_request_sent', u'profile_use_background_image', u'id', u'verified', u'profile_sidebar_fill_color', u'profile_text_color', u'followers_count', u'protected', u'location', u'profile_background_color', u'id_str', u'utc_offset', u'statuses_count', u'description', u'friends_count', u'profile_link_color', u'profile_image_url', u'notifications', u'show_all_inline_media', u'geo_enabled', u'profile_background_image_url', u'name', u'lang', u'following', u'profile_background_tile', u'favourites_count', u'screen_name', u'url', u'created_at', u'contributors_enabled', u'time_zone', u'profile_sidebar_border_color', u'is_translator', u'listed_count']
+# just put it in a sqlite3 table
+
+DEFAULT_TIMEOUT = 3
+
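+# Minimal HTTP handler for the urllib2/multiprocessing workaround in do_run(): it answers any GET with 200 and logs nothing.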
+class Requesthandler(BaseHTTPRequestHandler):
+
+    def __init__(self, request, client_address, server):
+        BaseHTTPRequestHandler.__init__(self, request, client_address, server)
+        
+    def do_GET(self):
+        self.send_response(200)
+        self.end_headers()
+    
+    def log_message(self, format, *args):        # @ReservedAssignment
+        pass
+
+
+def set_logging(options):
+    loggers = []
+    
+    loggers.append(utils.set_logging(options, logging.getLogger('iri.tweet')))
+    loggers.append(utils.set_logging(options, logging.getLogger('multiprocessing')))
+    if options.debug >= 2:
+        loggers.append(utils.set_logging(options, logging.getLogger('sqlalchemy.engine')))
+    # utils.set_logging(options, logging.getLogger('sqlalchemy.dialects'))
+    # utils.set_logging(options, logging.getLogger('sqlalchemy.pool'))
+    # utils.set_logging(options, logging.getLogger('sqlalchemy.orm'))
+    return loggers
+
+def set_logging_process(options, queue):
+    qlogger = utils.set_logging(options, logging.getLogger('iri.tweet.p'), queue)
+    qlogger.propagate = 0
+    return qlogger
+
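+# Build an OAuth1 header signer from the application's consumer credentials and the recorded access token.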
+def get_auth(options, access_token):
+    consumer_key = options.consumer_key
+    consumer_secret = options.consumer_secret
+    auth = requests_oauthlib.OAuth1(client_key=consumer_key, client_secret=consumer_secret, resource_owner_key=access_token[0], resource_owner_secret=access_token[1], signature_type='auth_header')
+    return auth
+
+
+def add_process_event(event_type, args, session_maker):
+    session = session_maker()
+    try:
+        evt = ProcessEvent(args=None if args is None else anyjson.serialize(args), type=event_type)
+        session.add(evt)
+        session.commit()
+    finally:
+        session.close()
+
+
+class BaseProcess(Process):
+
+    def __init__(self, session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid):
+        self.parent_pid = parent_pid
+        self.session_maker = session_maker
+        self.queue = queue
+        self.options = options
+        self.logger_queue = logger_queue
+        self.stop_event = stop_event
+        self.consumer_token = (options.consumer_key, options.consumer_secret)
+        self.access_token = access_token
+
+        super(BaseProcess, self).__init__()
+
+    #
+    # from http://stackoverflow.com/questions/2542610/python-daemon-doesnt-kill-its-kids
+    #
+    def parent_is_alive(self):
+        try:
+            # try to call Parent
+            os.kill(self.parent_pid, 0)
+        except OSError:
+            # *beeep* oh no! The phone's disconnected!
+            return False
+        else:
+            # *ring* Hi mom!
+            return True
+    
+
+    def __get_process_event_args(self):
+        return {'name':self.name, 'pid':self.pid, 'parent_pid':self.parent_pid, 'options':self.options.__dict__, 'access_token':self.access_token}
+
+    def run(self):
+        try:
+            add_process_event("start_worker", self.__get_process_event_args(), self.session_maker)
+            self.do_run()
+        finally:
+            add_process_event("stop_worker", self.__get_process_event_args(), self.session_maker)
+        
+    def do_run(self):
+        raise NotImplementedError()
+
+
+
+class SourceProcess(BaseProcess):
+    
+    def __init__(self, session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid):
+        self.track = options.track
+        self.token_filename = options.token_filename
+        self.timeout = options.timeout
+        self.stream = None
+        super(SourceProcess, self).__init__(session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid)
+                    
+    def __source_stream_iter(self):
+                
+        self.logger.debug("SourceProcess : run ")
+        
+        self.logger.debug("SourceProcess : get_auth auth with option %s and token %s " %(self.options, self.access_token))
+        self.auth = get_auth(self.options, self.access_token) 
+        self.logger.debug("SourceProcess : auth set ")
+        
+        track_list = self.track  # or raw_input('Keywords to track (comma separated): ').strip()
+        self.logger.debug("SourceProcess : track list " + track_list)
+        
+        track_list = [k.strip() for k in track_list.split(',')]
+
+        self.logger.debug("SourceProcess : before connecting to stream %s, url : %s, auth : %s" % (repr(track_list), self.options.url, repr(self.auth)))                        
+        self.stream = iri_tweet.stream.FilterStream(self.auth, track=track_list, raw=True, url=self.options.url, timeout=self.timeout, chunk_size=512, logger=self.logger)
+        self.logger.debug("SourceProcess : after connecting to stream")
+        self.stream.muststop = lambda: self.stop_event.is_set()        
+        
+        stream_wrapper = iri_tweet.stream.SafeStreamWrapper(self.stream, logger=self.logger)
+        
+        session = self.session_maker()
+        
+        #import pydevd
+        #pydevd.settrace(suspend=False)
+
+        
+        try:
+            for tweet in stream_wrapper:
+                if not self.parent_is_alive():
+                    self.stop_event.set()
+                    sys.exit()
+                self.logger.debug("SourceProcess : tweet " + repr(tweet))
+                source = TweetSource(original_json=tweet)
+                self.logger.debug("SourceProcess : source created")
+                add_retries = 0
+                while add_retries < 10:
+                    try:
+                        add_retries += 1
+                        session.add(source)
+                        session.flush()
+                        break
+                    except OperationalError as e:
+                        session.rollback()
+                        self.logger.debug("SourceProcess : Operational Error %s nb %d" % (repr(e), add_retries))
+                        if add_retries == 10:
+                            raise
+                     
+                source_id = source.id
+                self.logger.debug("SourceProcess : before queue + source id " + repr(source_id))
+                self.logger.info("SourceProcess : Tweet count: %d - current rate : %.2f - running : %s" % (self.stream.count, self.stream.rate, int(time.time() - self.stream.starttime)))
+                session.commit()
+                self.queue.put((source_id, tweet), False)
+
+        except Exception as e:
+            self.logger.error("SourceProcess : Error when processing tweet " + repr(e))
+            raise
+        finally:
+            session.rollback()
+            session.close()
+            self.stream.close()
+            self.stream = None
+            if not self.stop_event.is_set():
+                self.stop_event.set()
+
+
+    def do_run(self):
+        
+        self.logger = set_logging_process(self.options, self.logger_queue)                
+        
+        source_stream_iter_thread = threading.Thread(target=self.__source_stream_iter , name="SourceStreamIterThread")
+        
+        source_stream_iter_thread.start()
+        
+        try:
+            while not self.stop_event.is_set():
+                self.logger.debug("SourceProcess : In while after start")
+                self.stop_event.wait(DEFAULT_TIMEOUT)
+        except KeyboardInterrupt:
+            self.stop_event.set()
+
+        if self.stop_event.is_set() and self.stream:
+            self.stream.close()
+        elif not self.stop_event.is_set() and not source_stream_iter_thread.is_alive():
+            self.stop_event.set()
+
+        self.queue.cancel_join_thread()
+        self.logger_queue.cancel_join_thread()
+        self.logger.info("SourceProcess : join")
+        source_stream_iter_thread.join(30)
+
+
+def process_tweet(tweet, source_id, session, consumer_token, access_token, twitter_query_user, token_filename, logger):
+    try:
+        if not tweet.strip():
+            return
+        tweet_obj = anyjson.deserialize(tweet)
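+        # get_processor() picks a processor class from the decoded payload; None means the message is not a tweet (e.g. a stream control message).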
+        processor_klass = get_processor(tweet_obj)
+        if not processor_klass:
+            tweet_log = TweetLog(tweet_source_id=source_id, status=TweetLog.TWEET_STATUS['NOT_TWEET'])
+            session.add(tweet_log)
+            return
+        processor = processor_klass(json_dict=tweet_obj,
+                                    json_txt=tweet,
+                                    source_id=source_id,
+                                    session=session,
+                                    consumer_token=consumer_token,
+                                    access_token=access_token,
+                                    token_filename=token_filename,
+                                    user_query_twitter=twitter_query_user,
+                                    logger=logger)
+        logger.info(processor.log_info())                        
+        logger.debug(u"Process_tweet :" + repr(tweet))                
+        processor.process()
+        
+    except ValueError as e:
+        message = u"Value Error %s processing tweet %s" % (repr(e), tweet)
+        output = StringIO.StringIO()
+        try:
+            traceback.print_exc(file=output)
+            error_stack = output.getvalue()
+        finally:
+            output.close()
+        tweet_log = TweetLog(tweet_source_id=source_id, status=TweetLog.TWEET_STATUS['NOT_TWEET'], error=message, error_stack=error_stack)
+        session.add(tweet_log)
+        session.commit()        
+    except Exception as e:
+        message = u"Error %s processing tweet %s" % (repr(e), tweet)
+        logger.exception(message)
+        output = StringIO.StringIO()
+        try:
+            traceback.print_exc(file=output)
+            error_stack = output.getvalue()
+        finally:
+            output.close()
+        session.rollback()
+        tweet_log = TweetLog(tweet_source_id=source_id, status=TweetLog.TWEET_STATUS['ERROR'], error=message, error_stack=error_stack)
+        session.add(tweet_log)
+        session.commit()
+
+    
+        
+class TweetProcess(BaseProcess):
+    
+    def __init__(self, session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid):
+        super(TweetProcess, self).__init__(session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid)
+        self.twitter_query_user = options.twitter_query_user
+
+
+    def do_run(self):
+        
+        self.logger = set_logging_process(self.options, self.logger_queue)
+        session = self.session_maker()
+        try:
+            while not self.stop_event.is_set() and self.parent_is_alive():
+                try:
+                    source_id, tweet_txt = self.queue.get(True, 3)
+                    self.logger.debug("Processing source id " + repr(source_id))
+                except Exception as e:
+                    self.logger.debug('Process tweet exception in loop : ' + repr(e))
+                    continue
+                process_tweet(tweet_txt, source_id, session, self.consumer_token, self.access_token, self.twitter_query_user, self.options.token_filename, self.logger)
+                session.commit()
+        except KeyboardInterrupt:
+            self.stop_event.set()
+        finally:
+            session.rollback()
+            session.close()
+
+
+def get_sessionmaker(conn_str):
+    engine, metadata, Session = models.setup_database(conn_str, echo=False, create_all=False, autocommit=False)
+    Session = scoped_session(Session)
+    return Session, engine, metadata
+
+            
+def process_leftovers(session, consumer_token, access_token, twitter_query_user, token_filename, ask_process_leftovers, logger):
+    
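+    # Tweet sources without a matching TweetLog entry have never been processed.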
+    sources = session.query(TweetSource).outerjoin(TweetLog).filter(TweetLog.id == None)
+    sources_count = sources.count()
+    
+    if sources_count > 10 and ask_process_leftovers:
+        resp = raw_input("Do you want to process leftovers (Y/n) ? (%d tweets to process)" % sources_count)
+        if resp and resp.strip().lower() == "n":
+            return
+    logger.info("Process leftovers, %d tweets to process" % (sources_count))
+    for src in sources:
+        tweet_txt = src.original_json
+        process_tweet(tweet_txt, src.id, session, consumer_token, access_token, twitter_query_user, token_filename, logger)
+        session.commit()
+        
+    
+def process_log(logger_queues, stop_event):
+    while not stop_event.is_set():
+        for lqueue in logger_queues:
+            try:
+                record = lqueue.get_nowait()
+                logging.getLogger(record.name).handle(record)
+            except Queue.Empty:
+                continue
+            except IOError:
+                continue
+        time.sleep(0.1)
+
+        
+def get_options():
+
+    usage = "usage: %(prog)s [options]"
+
+    parser = argparse.ArgumentParser(usage=usage)
+
+    parser.add_argument("-f", "--file", dest="conn_str",
+                        help="write tweet to DATABASE. This is a connection string", metavar="CONNECTION_STR", default="enmi2010_twitter.db")
+    parser.add_argument("-T", "--track", dest="track",
+                        help="Twitter track", metavar="TRACK")
+    parser.add_argument("-k", "--key", dest="consumer_key",
+                        help="Twitter consumer key", metavar="CONSUMER_KEY", required=True)
+    parser.add_argument("-s", "--secret", dest="consumer_secret",
+                        help="Twitter consumer secret", metavar="CONSUMER_SECRET", required=True)
+    parser.add_argument("-n", "--new", dest="new", action="store_true",
+                        help="new database", default=False)
+    parser.add_argument("-D", "--daemon", dest="daemon", action="store_true",
+                        help="launch daemon", default=False)
+    parser.add_argument("-t", dest="token_filename", metavar="TOKEN_FILENAME", default=".oauth_token",
+                        help="Token file name")
+    parser.add_argument("-d", "--duration", dest="duration",
+                        help="Duration of recording in seconds", metavar="DURATION", default= -1, type=int)
+    parser.add_argument("-N", "--nb-process", dest="process_nb",
+                        help="number of process.\nIf 0, only the lefovers of the database are processed.\nIf 1, no postprocessing is done on the tweets.", metavar="PROCESS_NB", default=2, type=int)
+    parser.add_argument("--url", dest="url",
+                        help="The twitter url to connect to.", metavar="URL", default=iri_tweet.stream.FilterStream.url)
+    parser.add_argument("--query-user", dest="twitter_query_user", action="store_true",
+                        help="Query twitter for users", default=False)
+    parser.add_argument("--timeout", dest="timeout",
+                        help="timeout for connecting in seconds", default=60, metavar="TIMEOUT", type=int)
+    parser.add_argument("--ask-process-leftovers", dest="ask_process_leftovers", action="store_false",
+                        help="ask process leftover", default=True)
+
+
+    utils.set_logging_options(parser)
+
+    return parser.parse_args()
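+
+# Example invocation (hypothetical credentials and track list):
+#   python recorder_stream.py -k CONSUMER_KEY -s CONSUMER_SECRET \
+#       -T "enmi,iri" -f sqlite:///enmi_twitter.db -d 3600 -N 3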
+
+
+def do_run(options, session_maker):
+
+    stop_args = {}
+    
+    consumer_token = (options.consumer_key, options.consumer_secret)
+    access_token = utils.get_oauth_token(consumer_key=consumer_token[0], consumer_secret=consumer_token[1], token_file_path=options.token_filename)
+    
+    
+    session = session_maker()
+    try:
+        process_leftovers(session, consumer_token, access_token, options.twitter_query_user, options.token_filename, options.ask_process_leftovers, utils.get_logger())
+        session.commit()
+    finally:
+        session.rollback()
+        session.close()
+    
+    if options.process_nb <= 0:
+        utils.get_logger().debug("Leftovers processed. Exiting.")
+        return None
+
+    queue = mQueue()
+    stop_event = Event()
+    
+    # workaround for bug on using urllib2 and multiprocessing
+    httpd = HTTPServer(('127.0.0.1',0), Requesthandler)
+    thread.start_new_thread(httpd.handle_request, ())
+    
+    req = urllib2.Request('http://localhost:%d' % httpd.server_port)
+    conn = None
+    try:
+        conn = urllib2.urlopen(req)
+    except:
+        utils.get_logger().debug("could not open localhost")
+        # donothing
+    finally:
+        if conn is not None:
+            conn.close()
+    
+    process_engines = []
+    logger_queues = []
+    
+    SessionProcess, engine_process, _ = get_sessionmaker(conn_str)
+    process_engines.append(engine_process)
+    lqueue = mQueue(50)
+    logger_queues.append(lqueue)
+    pid = os.getpid()
+    sprocess = SourceProcess(SessionProcess, queue, options, access_token, stop_event, lqueue, pid)    
+    
+    tweet_processes = []
+    
+    for i in range(options.process_nb - 1):
+        SessionProcess, engine_process, _ = get_sessionmaker(conn_str)
+        process_engines.append(engine_process)
+        lqueue = mQueue(50)
+        logger_queues.append(lqueue)
+        cprocess = TweetProcess(SessionProcess, queue, options, access_token, stop_event, lqueue, pid)
+        tweet_processes.append(cprocess)
+
+    log_thread = threading.Thread(target=process_log, name="loggingThread", args=(logger_queues, stop_event,))
+    log_thread.daemon = True
+
+    log_thread.start()
+
+    sprocess.start()
+    for cprocess in tweet_processes:
+        cprocess.start()
+
+    add_process_event("pid", {'main':os.getpid(), 'source':(sprocess.name, sprocess.pid), 'consumers':dict([(p.name, p.pid) for p in tweet_processes])}, session_maker)
+
+    if options.duration >= 0:
+        end_ts = datetime.datetime.utcnow() + datetime.timedelta(seconds=options.duration)    
+
+    def interrupt_handler(signum, frame):
+        utils.get_logger().debug("shutdown asked " + repr(signum) + "  " + repr(inspect.getframeinfo(frame, 9)))
+        stop_args.update({'message': 'interrupt', 'signum': signum, 'frameinfo': inspect.getframeinfo(frame, 9)})
+        stop_event.set()
+
+    signal.signal(signal.SIGINT, interrupt_handler)
+    signal.signal(signal.SIGHUP, interrupt_handler)
+    signal.signal(signal.SIGALRM, interrupt_handler)
+    signal.signal(signal.SIGTERM, interrupt_handler)
+    
+
+    while not stop_event.is_set():
+        if options.duration >= 0 and  datetime.datetime.utcnow() >= end_ts:
+            stop_args.update({'message': 'duration', 'duration' : options.duration, 'end_ts' : end_ts})
+            stop_event.set()
+            break
+        if sprocess.is_alive():
+            utils.get_logger().debug("Source process alive")
+            time.sleep(1)
+        else:
+            stop_args.update({'message': 'Source process killed'})
+            stop_event.set()
+            break
+    utils.get_logger().debug("Joining Source Process")
+    try:
+        sprocess.join(10)
+    except:
+        utils.get_logger().debug("Pb joining Source Process - terminating")
+    finally:
+        sprocess.terminate()
+        
+    for i, cprocess in enumerate(tweet_processes):
+        utils.get_logger().debug("Joining consumer process Nb %d" % (i + 1))
+        try:
+            cprocess.join(3)
+        except:
+            utils.get_logger().debug("Pb joining consumer process Nb %d - terminating" % (i + 1))
+            cprocess.terminate()
+
+    
+    utils.get_logger().debug("Close queues")
+    try:
+        queue.close()
+        for lqueue in logger_queues:
+            lqueue.close()
+    except Exception as e:
+        utils.get_logger().error("error when closing queues %s", repr(e))
+        # do nothing
+        
+    
+    if options.process_nb > 1:
+        utils.get_logger().debug("Processing leftovers")
+        session = session_maker()
+        try:
+            process_leftovers(session, consumer_token, access_token, options.twitter_query_user, options.token_filename, options.ask_process_leftovers, utils.get_logger())
+            session.commit()
+        finally:
+            session.rollback()
+            session.close()
+
+    for pengine in process_engines:
+        pengine.dispose()
+    
+    return stop_args
+
+
+def main(options):
+    
+    global conn_str
+    
+    conn_str = options.conn_str.strip()
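+    # A bare filename is treated as an sqlite database: "tweets.db" becomes "sqlite:///tweets.db".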
+    if not re.match(r"^\w+://.+", conn_str):
+        conn_str = 'sqlite:///' + options.conn_str
+        
+    if conn_str.startswith("sqlite") and options.new:
+        filepath = conn_str[conn_str.find(":///") + 4:]
+        if os.path.exists(filepath):
+            i = 1
+            basename, extension = os.path.splitext(filepath)
+            new_path = '%s.%d%s' % (basename, i, extension)
+            while i < 1000000 and os.path.exists(new_path):
+                i += 1
+                new_path = '%s.%d%s' % (basename, i, extension)
+            if i >= 1000000:
+                raise Exception("Unable to find new filename for " + filepath)
+            else:
+                shutil.move(filepath, new_path)
+
+    Session, engine, metadata = get_sessionmaker(conn_str)
+    
+    if options.new:
+        check_metadata = sqlalchemy.schema.MetaData(bind=engine)
+        check_metadata.reflect()
+        if len(check_metadata.sorted_tables) > 0:
+            message = "Database %s not empty exiting" % conn_str
+            utils.get_logger().error(message)
+            sys.exit(message)
+    
+    metadata.create_all(engine)
+    session = Session()
+    try:
+        models.add_model_version(session)
+    finally:
+        session.close()
+    
+    stop_args = {}
+    try:
+        add_process_event(event_type="start", args={'options':options.__dict__, 'args': [], 'command_line': sys.argv}, session_maker=Session)
+        stop_args = do_run(options, Session)
+    except Exception as e:
+        utils.get_logger().exception("Error in main thread")        
+        outfile = StringIO.StringIO()
+        try:
+            traceback.print_exc(file=outfile)
+            stop_args = {'error': repr(e), 'message': getattr(e, 'message', ''), 'stacktrace':outfile.getvalue()}
+        finally:
+            outfile.close()
+        raise
+    finally:    
+        add_process_event(event_type="shutdown", args=stop_args, session_maker=Session)
+
+    utils.get_logger().debug("Done. Exiting. " + repr(stop_args))
+
+
+
+if __name__ == '__main__':
+
+    options = get_options()
+    
+    loggers = set_logging(options)
+    
+    utils.get_logger().debug("OPTIONS : " + repr(options))
+    
+    if options.daemon:
+        options.ask_process_leftovers = False
+        import daemon
+        
+        hdlr_preserve = []
+        for logger in loggers:
+            hdlr_preserve.extend([h.stream for h in logger.handlers])
+            
+        context = daemon.DaemonContext(working_directory=os.getcwd(), files_preserve=hdlr_preserve) 
+        with context:
+            main(options)
+    else:
+        main(options)
+    
--- a/script/stream/recorder_tweetstream.py	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,528 +0,0 @@
-from getpass import getpass
-from iri_tweet import models, utils
-from iri_tweet.models import TweetSource, TweetLog, ProcessEvent
-from multiprocessing import (Queue as mQueue, JoinableQueue, Process, Event, 
-    get_logger)
-from optparse import OptionParser
-from sqlalchemy.exc import OperationalError
-from sqlalchemy.orm import scoped_session
-import Queue
-import StringIO
-import anyjson
-import datetime
-import inspect
-import logging
-import os
-import re
-import shutil
-import signal
-import socket
-import sqlalchemy.schema
-import sys
-import threading
-import time
-import traceback
-import tweepy.auth
-import tweetstream
-import urllib2
-socket._fileobject.default_bufsize = 0
-
-
-
-#columns_tweet = [u'favorited', u'truncated', u'text', u'created_at', u'source', u'in_reply_to_status_id', u'in_reply_to_screen_name', u'in_reply_to_user_id', u'geo', u'id', u'user']
-columns_tweet = [u'user', u'favorited', u'contributors', u'truncated', u'text', u'created_at', u'retweeted', u'in_reply_to_status_id_str', u'coordinates', u'in_reply_to_user_id_str', u'entities', u'in_reply_to_status_id', u'place', u'in_reply_to_user_id', u'id', u'in_reply_to_screen_name', u'retweet_count', u'geo', u'id_str', u'source']
-#columns_user = [u'id', u'verified', u'profile_sidebar_fill_color', u'profile_text_color', u'followers_count', u'protected', u'location', u'profile_background_color', u'utc_offset', u'statuses_count', u'description', u'friends_count', u'profile_link_color', u'profile_image_url', u'notifications', u'geo_enabled', u'profile_background_image_url', u'screen_name', u'profile_background_tile', u'favourites_count', u'name', u'url', u'created_at', u'time_zone', u'profile_sidebar_border_color', u'following']
-columns_user = [u'follow_request_sent', u'profile_use_background_image', u'id', u'verified', u'profile_sidebar_fill_color', u'profile_text_color', u'followers_count', u'protected', u'location', u'profile_background_color', u'id_str', u'utc_offset', u'statuses_count', u'description', u'friends_count', u'profile_link_color', u'profile_image_url', u'notifications', u'show_all_inline_media', u'geo_enabled', u'profile_background_image_url', u'name', u'lang', u'following', u'profile_background_tile', u'favourites_count', u'screen_name', u'url', u'created_at', u'contributors_enabled', u'time_zone', u'profile_sidebar_border_color', u'is_translator', u'listed_count']
-#just put it in a sqlite3 tqble
-
-
-def set_logging(options):
-    loggers = []
-    
-    loggers.append(utils.set_logging(options, logging.getLogger('iri.tweet')))
-    loggers.append(utils.set_logging(options, logging.getLogger('multiprocessing')))
-    if options.debug >= 2:
-        loggers.append(utils.set_logging(options, logging.getLogger('sqlalchemy.engine')))
-    #utils.set_logging(options, logging.getLogger('sqlalchemy.dialects'))
-    #utils.set_logging(options, logging.getLogger('sqlalchemy.pool'))
-    #utils.set_logging(options, logging.getLogger('sqlalchemy.orm'))
-    return loggers
-
-def set_logging_process(options, queue):
-    qlogger = utils.set_logging(options, logging.getLogger('iri.tweet.p'), queue)
-    qlogger.propagate = 0
-    return qlogger
-
-def get_auth(options, access_token):
-    if options.username and options.password:
-        auth = tweepy.auth.BasicAuthHandler(options.username, options.password)        
-    else:
-        consumer_key = models.CONSUMER_KEY
-        consumer_secret = models.CONSUMER_SECRET
-        auth = tweepy.auth.OAuthHandler(consumer_key, consumer_secret, secure=False)
-        auth.set_access_token(*access_token)
-    return auth
-
-
-def add_process_event(type, args, session_maker):
-    session = session_maker()
-    try:
-        evt = ProcessEvent(args=None if args is None else anyjson.serialize(args), type=type)
-        session.add(evt)
-        session.commit()
-    finally:
-        session.close()
-
-
-class BaseProcess(Process):
-
-    def __init__(self, session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid):
-        self.parent_pid = parent_pid
-        self.session_maker = session_maker
-        self.queue = queue
-        self.options = options
-        self.logger_queue = logger_queue
-        self.stop_event = stop_event
-        self.access_token = access_token
-
-        super(BaseProcess, self).__init__()
-
-    #
-    # from http://stackoverflow.com/questions/2542610/python-daemon-doesnt-kill-its-kids
-    #
-    def parent_is_alive(self):
-        try:
-            # try to call Parent
-            os.kill(self.parent_pid, 0)
-        except OSError:
-            # *beeep* oh no! The phone's disconnected!
-            return False
-        else:
-            # *ring* Hi mom!
-            return True
-    
-
-    def __get_process_event_args(self):
-        return {'name':self.name, 'pid':self.pid, 'parent_pid':self.parent_pid, 'options':self.options.__dict__, 'access_token':self.access_token}
-
-    def run(self):
-        try:
-            add_process_event("start_worker", self.__get_process_event_args(), self.session_maker)
-            self.do_run()
-        finally:
-            add_process_event("stop_worker", self.__get_process_event_args(), self.session_maker)
-        
-    def do_run(self):
-        raise NotImplementedError()
-
-
-
-class SourceProcess(BaseProcess):
-    
-    def __init__(self, session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid):
-        self.track = options.track
-        self.token_filename = options.token_filename
-        self.catchup = options.catchup
-        self.timeout = options.timeout
-        super(SourceProcess, self).__init__(session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid)
-
-    def do_run(self):
-        
-        #import pydevd
-        #pydevd.settrace(suspend=True)
-
-        self.logger = set_logging_process(self.options, self.logger_queue)
-        self.auth = get_auth(self.options, self.access_token) 
-        
-        self.logger.debug("SourceProcess : run ")
-        track_list = self.track # or raw_input('Keywords to track (comma seperated): ').strip()
-        self.logger.debug("SourceProcess : track list " + track_list)
-        
-        track_list = [k.strip() for k in track_list.split(',')]
-
-        self.logger.debug("SourceProcess : before connecting to stream " + repr(track_list))                        
-        stream = tweetstream.FilterStream(self.auth, track=track_list, raw=True, url=self.options.url, catchup=self.catchup, timeout=self.timeout)
-        self.logger.debug("SourceProcess : after connecting to stream")
-        stream.muststop = lambda: self.stop_event.is_set()
-        
-        session = self.session_maker()
-        
-        try:
-            for tweet in stream:
-                if not self.parent_is_alive():
-                    sys.exit()
-                self.logger.debug("SourceProcess : tweet " + repr(tweet))
-                source = TweetSource(original_json=tweet)
-                self.logger.debug("SourceProcess : source created")
-                add_retries = 0
-                while add_retries < 10:
-                    try:
-                        add_retries += 1
-                        session.add(source)
-                        session.flush()
-                        break
-                    except OperationalError as e:
-                        session.rollback()
-                        self.logger.debug("SourceProcess : Operational Error %s nb %d" % (repr(e), add_retries))
-                        if add_retries == 10:
-                            raise e
-                     
-                source_id = source.id
-                self.logger.debug("SourceProcess : before queue + source id " + repr(source_id))
-                self.logger.info("SourceProcess : Tweet count: %d - current rate : %.2f - running : %s" % (stream.count, stream.rate, int(time.time() - stream.starttime)))
-                session.commit()
-                self.queue.put((source_id, tweet), False)
-
-        except Exception as e:
-            self.logger.error("SourceProcess : Error when processing tweet " + repr(e))
-        finally:
-            session.rollback()
-            stream.close()
-            session.close()
-            self.queue.close()
-            self.stop_event.set()
-
-
-def process_tweet(tweet, source_id, session, access_token, twitter_query_user, logger):
-    try:
-        tweet_obj = anyjson.deserialize(tweet)
-        if 'text' not in tweet_obj:
-            tweet_log = TweetLog(tweet_source_id=source_id, status=TweetLog.TWEET_STATUS['NOT_TWEET'])
-            session.add(tweet_log)
-            return
-        screen_name = ""
-        if 'user' in tweet_obj and 'screen_name' in tweet_obj['user']:
-            screen_name = tweet_obj['user']['screen_name']
-        logger.info(u"Process_tweet from %s : %s" % (screen_name, tweet_obj['text']))
-        logger.debug(u"Process_tweet :" + repr(tweet))
-        processor = utils.TwitterProcessor(tweet_obj, tweet, source_id, session, access_token, None, twitter_query_user)
-        processor.process()
-    except Exception as e:
-        message = u"Error %s processing tweet %s" % (repr(e), tweet)
-        logger.exception(message)
-        output = StringIO.StringIO()
-        try:
-            traceback.print_exc(file=output)
-            error_stack = output.getvalue()
-        finally:
-            output.close()
-        session.rollback()
-        tweet_log = TweetLog(tweet_source_id=source_id, status=TweetLog.TWEET_STATUS['ERROR'], error=message, error_stack=error_stack)
-        session.add(tweet_log)
-        session.commit()
-
-    
-        
-class TweetProcess(BaseProcess):
-    
-    def __init__(self, session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid):
-        super(TweetProcess, self).__init__(session_maker, queue, options, access_token, stop_event, logger_queue, parent_pid)
-        self.twitter_query_user = options.twitter_query_user
-
-
-    def do_run(self):
-        
-        self.logger = set_logging_process(self.options, self.logger_queue)
-        session = self.session_maker()
-        try:
-            while not self.stop_event.is_set() and self.parent_is_alive():
-                try:
-                    source_id, tweet_txt = self.queue.get(True, 3)
-                    self.logger.debug("Processing source id " + repr(source_id))
-                except Exception as e:
-                    self.logger.debug('Process tweet exception in loop : ' + repr(e))
-                    continue
-                process_tweet(tweet_txt, source_id, session, self.access_token, self.twitter_query_user, self.logger)
-                session.commit()
-        finally:
-            session.rollback()
-            self.stop_event.set()
-            session.close()
-
-
-def get_sessionmaker(conn_str):
-    engine, metadata, Session = models.setup_database(conn_str, echo=False, create_all=False, autocommit=False)
-    Session = scoped_session(Session)
-    return Session, engine, metadata
-
-            
-def process_leftovers(session, access_token, twitter_query_user, logger):
-    
-    sources = session.query(TweetSource).outerjoin(TweetLog).filter(TweetLog.id == None)
-    
-    for src in sources:
-        tweet_txt = src.original_json
-        process_tweet(tweet_txt, src.id, session, access_token, twitter_query_user, logger)
-        session.commit()
-
-        
-    
-    #get tweet source that do not match any message
-    #select * from tweet_tweet_source ts left join tweet_tweet_log tl on ts.id = tl.tweet_source_id where tl.id isnull;
-def process_log(logger_queues, stop_event):
-    while not stop_event.is_set():
-        for lqueue in logger_queues:
-            try:
-                record = lqueue.get_nowait()
-                logging.getLogger(record.name).handle(record)
-            except Queue.Empty:
-                continue
-            except IOError:
-                continue
-        time.sleep(0.1)
-
-        
-def get_options():
-
-    usage = "usage: %prog [options]"
-
-    parser = OptionParser(usage=usage)
-
-    parser.add_option("-f", "--file", dest="conn_str",
-                      help="write tweet to DATABASE. This is a connection string", metavar="CONNECTION_STR", default="enmi2010_twitter.db")
-    parser.add_option("-u", "--user", dest="username",
-                      help="Twitter user", metavar="USER", default=None)
-    parser.add_option("-w", "--password", dest="password",
-                      help="Twitter password", metavar="PASSWORD", default=None)
-    parser.add_option("-T", "--track", dest="track",
-                      help="Twitter track", metavar="TRACK")
-    parser.add_option("-n", "--new", dest="new", action="store_true",
-                      help="new database", default=False)
-    parser.add_option("-D", "--daemon", dest="daemon", action="store_true",
-                      help="launch daemon", default=False)
-    parser.add_option("-t", dest="token_filename", metavar="TOKEN_FILENAME", default=".oauth_token",
-                      help="Token file name")
-    parser.add_option("-d", "--duration", dest="duration",
-                      help="Duration of recording in seconds", metavar="DURATION", default= -1, type='int')
-    parser.add_option("-N", "--nb-process", dest="process_nb",
-                      help="number of process.\nIf 0, only the lefovers of the database are processed.\nIf 1, no postprocessing is done on the tweets.", metavar="PROCESS_NB", default=2, type='int')
-    parser.add_option("--url", dest="url",
-                      help="The twitter url to connect to.", metavar="URL", default=tweetstream.FilterStream.url)
-    parser.add_option("--query-user", dest="twitter_query_user", action="store_true",
-                      help="Query twitter for users", default=False, metavar="QUERY_USER")
-    parser.add_option("--catchup", dest="catchup",
-                      help="catchup count for tweets", default=None, metavar="CATCHUP", type='int')
-    parser.add_option("--timeout", dest="timeout",
-                      help="timeout for connecting in seconds", default=60, metavar="TIMEOUT", type='int')
-    
-
-
-
-    utils.set_logging_options(parser)
-
-    return parser.parse_args()
-
-
-def do_run(options, session_maker):
-
-    stop_args = {}
-
-    access_token = None
-    if not options.username or not options.password:
-        access_token = utils.get_oauth_token(options.token_filename)
-    
-    session = session_maker()
-    try:
-        process_leftovers(session, access_token, options.twitter_query_user, utils.get_logger())
-        session.commit()
-    finally:
-        session.rollback()
-        session.close()
-    
-    if options.process_nb <= 0:
-        utils.get_logger().debug("Leftovers processed. Exiting.")
-        return None
-
-    queue = mQueue()
-    stop_event = Event()
-    
-    #workaround for bug on using urllib2 and multiprocessing
-    req = urllib2.Request('http://localhost')
-    conn = None
-    try:
-        conn = urllib2.urlopen(req)
-    except:
-        utils.get_logger().debug("could not open localhost")
-        #donothing
-    finally:
-        if conn is not None:
-            conn.close()
-    
-    process_engines = []
-    logger_queues = []
-    
-    SessionProcess, engine_process, metadata_process = get_sessionmaker(conn_str)
-    process_engines.append(engine_process)
-    lqueue = mQueue(1)
-    logger_queues.append(lqueue)
-    pid = os.getpid()
-    sprocess = SourceProcess(SessionProcess, queue, options, access_token, stop_event, lqueue, pid)    
-    
-    tweet_processes = []
-    
-    for i in range(options.process_nb - 1):
-        SessionProcess, engine_process, metadata_process = get_sessionmaker(conn_str)
-        process_engines.append(engine_process)
-        lqueue = mQueue(1)
-        logger_queues.append(lqueue)
-        cprocess = TweetProcess(SessionProcess, queue, options, access_token, stop_event, lqueue, pid)
-        tweet_processes.append(cprocess)
-
-    def interupt_handler(signum, frame):
-        utils.get_logger().debug("shutdown asked " + repr(signum) + "  " + repr(inspect.getframeinfo(frame, 9)))
-        stop_args.update({'message': 'interupt', 'signum':signum, 'frameinfo':inspect.getframeinfo(frame, 9)})
-        stop_event.set()
-        
-    signal.signal(signal.SIGINT , interupt_handler)
-    signal.signal(signal.SIGHUP , interupt_handler)
-    signal.signal(signal.SIGALRM, interupt_handler)
-    signal.signal(signal.SIGTERM, interupt_handler)
-
-    log_thread = threading.Thread(target=process_log, name="loggingThread", args=(logger_queues, stop_event,))
-    log_thread.daemon = True
-
-    log_thread.start()
-
-    sprocess.start()
-    for cprocess in tweet_processes:
-        cprocess.start()
-
-    add_process_event("pid", {'main':os.getpid(), 'source':(sprocess.name, sprocess.pid), 'consumers':dict([(p.name, p.pid) for p in tweet_processes])}, session_maker)
-
-    if options.duration >= 0:
-        end_ts = datetime.datetime.utcnow() + datetime.timedelta(seconds=options.duration)    
-    
-
-    while not stop_event.is_set():
-        if options.duration >= 0 and  datetime.datetime.utcnow() >= end_ts:
-            stop_args.update({'message': 'duration', 'duration' : options.duration, 'end_ts' : end_ts})
-            stop_event.set()
-            break
-        if sprocess.is_alive():            
-            time.sleep(1)
-        else:
-            stop_args.update({'message': 'Source process killed'})
-            stop_event.set()
-            break
-    utils.get_logger().debug("Joining Source Process")
-    try:
-        sprocess.join(10)
-    except:
-        utils.get_logger().debug("Pb joining Source Process - terminating")
-        sprocess.terminate()
-        
-    for i, cprocess in enumerate(tweet_processes):
-        utils.get_logger().debug("Joining consumer process Nb %d" % (i + 1))
-        try:
-            cprocess.join(3)
-        except:
-            utils.get_logger().debug("Pb joining consumer process Nb %d - terminating" % (i + 1))
-            cprocess.terminate()
-
-    
-    utils.get_logger().debug("Close queues")
-    try:
-        queue.close()
-        for lqueue in logger_queues:
-            lqueue.close()
-    except exception as e:
-        utils.get_logger().error("error when closing queues %s", repr(e))
-        #do nothing
-        
-    
-    if options.process_nb > 1:
-        utils.get_logger().debug("Processing leftovers")
-        session = session_maker()
-        try:
-            process_leftovers(session, access_token, options.twitter_query_user, utils.get_logger())
-            session.commit()
-        finally:
-            session.rollback()
-            session.close()
-
-    for pengine in process_engines:
-        pengine.dispose()
-
-    return stop_args
-
-
-def main(options, args):
-    
-    global conn_str
-    
-    conn_str = options.conn_str.strip()
-    if not re.match("^\w+://.+", conn_str):
-        conn_str = 'sqlite:///' + options.conn_str
-        
-    if conn_str.startswith("sqlite") and options.new:
-        filepath = conn_str[conn_str.find(":///") + 4:]
-        if os.path.exists(filepath):
-            i = 1
-            basename, extension = os.path.splitext(filepath)
-            new_path = '%s.%d%s' % (basename, i, extension)
-            while i < 1000000 and os.path.exists(new_path):
-                i += 1
-                new_path = '%s.%d%s' % (basename, i, extension)
-            if i >= 1000000:
-                raise Exception("Unable to find new filename for " + filepath)
-            else:
-                shutil.move(filepath, new_path)
-
-    Session, engine, metadata = get_sessionmaker(conn_str)
-    
-    if options.new:
-        check_metadata = sqlalchemy.schema.MetaData(bind=engine, reflect=True)
-        if len(check_metadata.sorted_tables) > 0:
-            message = "Database %s not empty exiting" % conn_str
-            utils.get_logger().error(message)
-            sys.exit(message)
-    
-    metadata.create_all(engine)
-    session = Session()
-    try:
-        models.add_model_version(session)
-    finally:
-        session.close()
-    
-    stop_args = {}
-    try:
-        add_process_event(type="start", args={'options':options.__dict__, 'args': args, 'command_line': sys.argv}, session_maker=Session)
-        stop_args = do_run(options, Session)
-    except Exception as e:
-        utils.get_logger().exception("Error in main thread")        
-        outfile = StringIO.StringIO()
-        try:
-            traceback.print_exc(file=outfile)
-            stop_args = {'error': repr(e), 'message': getattr(e, 'message', ''), 'stacktrace':outfile.getvalue()}
-        finally:
-            outfile.close()
-        raise
-    finally:    
-        add_process_event(type="shutdown", args=stop_args, session_maker=Session)
-
-    utils.get_logger().debug("Done. Exiting. " + repr(stop_args))
-
-
-
-if __name__ == '__main__':
-
-    (options, args) = get_options()
-    
-    loggers = set_logging(options)
-    
-    utils.get_logger().debug("OPTIONS : " + repr(options))
-    
-    if options.daemon:
-        import daemon
-        import lockfile
-        
-        hdlr_preserve = []
-        for logger in loggers:
-            hdlr_preserve.extend([h.stream for h in logger.handlers])
-            
-        context = daemon.DaemonContext(working_directory=os.getcwd(), files_preserve=hdlr_preserve) 
-        with context:
-            main(options, args)
-    else:
-        main(options, args)
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/script/utils/export_pad.py	Fri May 10 13:34:40 2013 +0200
@@ -0,0 +1,332 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+from dateutil.parser import parse as parse_date
+from iri_tweet.utils import set_logging_options, set_logging, get_logger
+from lxml import etree
+from optparse import OptionParser
+import anyjson
+import datetime
+import functools
+import httplib2
+import os.path
+import requests
+import sys
+import time
+import uuid
+
+
+class EtherpadRequestException(Exception):
+    def __init__(self, original_resp):
+        super(EtherpadRequestException, self).__init__(original_resp["message"])
+        self.status = original_resp["status"]
+        self.original_resp = original_resp
+
+
+class EtherpadRequest():
+    
+    def __init__(self, base_url, api_key):
+        self.base_url = base_url
+        self.api_key = api_key
+        self.__request = None
+
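+    # Any undefined attribute resolves to an API call: req.getText(padID=...) issues GET <base_url>/getText.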
+    def __getattr__(self, name):
+        return functools.partial(self.__action, name)
+
+    def __action(self, action, **kwargs):
+        url = "%s/%s" % (self.base_url, action)
+        params = dict(kwargs)
+        params['apikey'] = self.api_key
+        
+        r = requests.get(url, params)
+        
+        resp = anyjson.deserialize(r.text)
+        
+        if resp["code"] == 0:
+            return resp["data"]
+        else:
+            raise EtherpadRequestException(resp)
+        
+    
+    def getRevisionsCount(self, padID):
+        f = self.__getattr__("getRevisionsCount")
+        res = f(padID=padID)
+        
+        return res["revisions"]
+    
+    def getPadUrl(self, padID):
+        
+        return "%s/%s" % (self.base_url,padID)
+    
+    
+
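+# Example use of EtherpadRequest (hypothetical endpoint and key):
+#   req = EtherpadRequest("http://pad.example.org/api/1", "SECRET_API_KEY")
+#   count = req.getRevisionsCount("mypad")
+#   text = req.getText(padID="mypad", rev=count)
+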
+def abort(message, parser=None):
+    if message is not None:
+        sys.stderr.write(message + "\n")
+    if parser is not None:
+        parser.print_help()
+    sys.exit(1)
+
+def get_options():
+    
+    parser = OptionParser()
+    parser.add_option("-u", "--api-url", dest="api_url",
+                      help="Base etherpad-lite api url", metavar="API_URL", default=None)
+    parser.add_option("-k", "--api-key", dest="api_key",
+                      help="Base etherpad-lite api url", metavar="API_KEY", default=None)
+    parser.add_option("-p", "--pad-id", dest="pad_id",
+                      help="pad id", metavar="PADID")
+    parser.add_option("-s", "--start-date", dest="start_date",
+                      help="start date", metavar="START_DATE", default=None)
+    parser.add_option("-e", "--end-date", dest="end_date",
+                      help="end date", metavar="END_DATE", default=None)
+    parser.add_option("-f", "--format", dest="format", type="choice",
+                      help="format", metavar="FORMAT", choice=['html', 'text'], default='html')
+    parser.add_option("-I", "--content-file", dest="content_file",
+                      help="Content file", metavar="CONTENT_FILE")
+    parser.add_option("-C", "--color", dest="color",
+                      help="Color code", metavar="COLOR", default="16763904")
+    parser.add_option("-D", "--duration", dest="duration", type="int",
+                      help="Duration", metavar="DURATION", default=None)
+    parser.add_option("-n", "--name", dest="name",
+                      help="Cutting name", metavar="NAME", default=u"pads")
+    parser.add_option("-R", "--replace", dest="replace", action="store_true",
+                      help="Replace tweet ensemble", metavar="REPLACE", default=False)
+    parser.add_option("-m", "--merge", dest="merge", action="store_true",
+                      help="merge tweet ensemble, choose the first ensemble", metavar="MERGE", default=False)
+    parser.add_option("-E", "--extended", dest="extended_mode", action="store_true",
+                      help="Trigger polemic extended mode", metavar="EXTENDED", default=False)
+    parser.add_option("-S", "--step", dest="step", type=1,
+                      help="step for version", metavar="STEP", default=False)
+
+    
+    
+    set_logging_options(parser)
+
+    
+    return parser.parse_args() + (parser,)
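+
+# Example invocation (hypothetical values):
+#   python export_pad.py -u http://pad.example.org/api/1 -k SECRET_API_KEY \
+#       -p mypad -s "2013-05-10T14:00" -D 3600 -I project.ldt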
+
+
+if __name__ == "__main__" :
+
+    (options, args, parser) = get_options()
+    
+    set_logging(options)
+    get_logger().debug("OPTIONS : " + repr(options)) #@UndefinedVariable
+    
+    if len(sys.argv) == 1:
+        abort(None, parser)
+
+    base_url = options.api_url
+    if not base_url:
+        abort("No base url")
+
+    api_key = options.api_key
+    if not api_key:
+        abort("api key missing")
+        
+    pad_id = options.pad_id
+    if not pad_id:
+        abort("No pad id")
+
+    start_date_str = options.start_date
+    end_date_str = options.end_date
+    duration = options.duration
+    
+    start_date = None
+    start_ts = None
+    if start_date_str:
+        start_date = parse_date(start_date_str) 
+        start_ts = time.mktime(start_date.timetuple())*1000
+
+    end_date = None
+    if end_date_str:
+        end_date = parse_date(end_date_str)
+    elif start_date and duration:
+        end_date = start_date + datetime.timedelta(seconds=duration)
+        
+    if start_date is None or end_date is None:
+        abort("No start date found")
+
+    end_ts = None
+    if end_date is not None:
+        end_ts = time.mktime(end_date.timetuple())*1000
+
+    content_file = options.content_file
+    
+    if not content_file:
+        abort("No content file")        
+
+    root = None
+
+    if content_file.startswith("http"):
+
+        get_logger().debug("url : " + content_file) #@UndefinedVariable
+        
+        h = httplib2.Http()
+        resp, content = h.request(content_file)
+        
+        get_logger().debug("url response " + repr(resp) + " content " + repr(content)) #@UndefinedVariable
+        
+        project = anyjson.deserialize(content)
+        root = etree.fromstring(project["ldt"])
+                
+    elif os.path.exists(content_file):
+
+        doc = etree.parse(content_file)
+        root = doc.getroot()
+            
+    if root is None:
+        abort("Content file not found or unreadable")
+
+    cutting_name = options.name
+    if cutting_name is None:
+        cutting_name = "pad_%s" % pad_id
+
+    output_format = options.format
+    ensemble_parent = None
+                
+    file_type = None
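+    # Detect the document type from the root's children: <project> marks an ldt file, <head> an iri file.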
+    for node in root:
+        if node.tag == "project":
+            file_type = "ldt"
+            break
+        elif node.tag == "head":
+            file_type = "iri"
+            break
+    if file_type is None:
+        abort("Unknown file type")
+
+    if file_type == "ldt":
+        media_nodes = root.xpath("//media")
+        if len(media_nodes) > 0:
+            media = media_nodes[0]
+        annotations_node = root.find(u"annotations")
+        if annotations_node is None:
+            annotations_node = etree.SubElement(root, u"annotations")
+        content_node = annotations_node.find(u"content")
+        if content_node is None:
+            content_node = etree.SubElement(annotations_node,u"content", id=media.get(u"id"))
+        ensemble_parent = content_node
+    elif file_type == "iri":
+        body_node = root.find(u"body")
+        if body_node is None:
+            body_node = etree.SubElement(root, u"body")
+        ensembles_node = body_node.find(u"ensembles")
+        if ensembles_node is None:
+            ensembles_node = etree.SubElement(body_node, u"ensembles")
+        ensemble_parent = ensembles_node
+
+    if ensemble_parent is None:
+        abort("Can not add cutting")
+
+    if options.replace:
+        for ens in ensemble_parent.iterchildren(tag=u"ensemble"):
+            if ens.get("id","").startswith(cutting_name):
+                ensemble_parent.remove(ens)
+                
+    ensemble = None
+    elements = None
+                
+    if options.merge:
+        ensemble = ensemble_parent.find(u"ensemble")
+        if ensemble is not None:
+            elements = ensemble.find(u".//elements")                
+        
+    if ensemble is None or elements is None:
+        ensemble = etree.SubElement(ensemble_parent, u"ensemble", {u"id":u"tweet_" + unicode(uuid.uuid4()), u"title":u"Ensemble pad", u"author":u"IRI Web", u"abstract":u"Ensemble Pad"})
+        decoupage = etree.SubElement(ensemble, u"decoupage", {u"id": unicode(uuid.uuid4()), u"author": u"IRI Web"})
+    
+        etree.SubElement(decoupage, u"title").text = unicode(cutting_name)
+        etree.SubElement(decoupage, u"abstract").text = unicode(cutting_name)
+    
+        elements = etree.SubElement(decoupage, u"elements")
+
+
+    etp_req = EtherpadRequest(base_url, api_key)
+    rev_count = etp_req.getRevisionCount(pad_id)
+    
+    
+    step = options.step if options.step > 0 else 1
+    version_range = range(1, rev_count+1, step)
+    # make sure that the last version is exported
+    if rev_count not in version_range:
+        version_range.append(rev_count)
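+    # Export each selected revision, skipping any outside the [start_ts, end_ts] window.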
+    for rev in version_range:
+        
+        data = None
+        text = ""
+        
+        if output_format == "html":
+            data = etp_req.getHtml(padID=pad_id, rev=rev)
+            text = data.get("html", "")
+        else:
+            data = etp_req.getText(padID=pad_id, rev=rev)
+            text = data.get("text","")
+
+        pad_ts = data['timestamp']
+        
+        if pad_ts < start_ts:
+            continue
+        
+        if end_ts is not None and pad_ts > end_ts:
+            break
+
+        pad_dt = datetime.datetime.fromtimestamp(float(pad_ts)/1000.0)
+        pad_ts_rel = pad_ts - start_ts
+        
+        username = None
+        color = ""
+        if 'author' in data:
+            username = data['author']['name'] if ('name' in data['author'] and data['author']['name']) else data['author']['id']
+            color =  data['author']['color'] if ('color' in data['author'] and data['author']['color']) else ""
+        
+        if not username:
+            username = "anon."
+            
+            
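+        # "begin" holds the revision timestamp relative to the cutting start, in milliseconds.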
+        element = etree.SubElement(elements, u"element" , {u"id":"%s-%s-%d" %(unicode(uuid.uuid4()),unicode(pad_id),rev), u"color":unicode(color), u"author":unicode(username), u"date":unicode(pad_dt.strftime("%Y/%m/%d")), u"begin": unicode(pad_ts_rel), u"dur":u"0", u"src":""})
+        etree.SubElement(element, u"title").text = "%s: %s - rev %d" % (unicode(username), unicode(pad_id), rev)
+        etree.SubElement(element, u"abstract").text = unicode(text)
+        
+        meta_element = etree.SubElement(element, u'meta')
+        etree.SubElement(meta_element, "pad_url").text = etree.CDATA(unicode(etp_req.getPadUrl(pad_id)))
+        etree.SubElement(meta_element, "revision").text = etree.CDATA(unicode(rev))
+
+    # sort elements by their "begin" timecode
+    if options.merge:
+        elements[:] = sorted(elements, key=lambda n: int(n.get('begin')))
+    
+    output_data = etree.tostring(root, encoding="utf-8", method="xml", pretty_print=False, xml_declaration=True)  
+    
+    if content_file and content_file.startswith("http"):
+        
+        project["ldt"] = output_data
+        body = anyjson.serialize(project)
+        h = httplib2.Http()
+        resp, content = h.request(content_file, "PUT", headers={'content-type':'application/json'}, body=body)
+        if resp.status != 200:
+            raise Exception("Error writing content : %d : %s"%(resp.status, resp.reason))                        
+    else:
+        if content_file and os.path.exists(content_file):
+            dest_file_name = content_file
+        else:
+            abort("No writable content file")
+
+        with open(dest_file_name, "w") as output:
+            output.write(output_data)
+        
+
--- a/script/utils/export_tweet_db.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/utils/export_tweet_db.py	Fri May 10 13:34:40 2013 +0200
@@ -1,8 +1,11 @@
-from models import setup_database
-from optparse import OptionParser #@UnresolvedImport
-from sqlalchemy.orm import sessionmaker
-from utils import set_logging_options, set_logging, TwitterProcessor, logger
-import sqlite3 #@UnresolvedImport
+from iri_tweet.models import setup_database
+from iri_tweet.processor import TwitterProcessorStatus
+from iri_tweet.utils import set_logging_options, set_logging
+from optparse import OptionParser
+import logging
+import sqlite3
+
+logger = logging.getLogger(__name__)
 
 
 #    'entities': "tweet_entity",     
@@ -33,7 +36,7 @@
             fields_mapping = {}
             for i,res in enumerate(curs_in.execute("select json from tweet_tweet;")):
                 logger.debug("main loop %d : %s" % (i, res[0])) #@UndefinedVariable
-                processor = TwitterProcessor(eval(res[0]), res[0], None, session, options.token_filename)
+                processor = TwitterProcessorStatus(json_dict=eval(res[0]), json_txt=res[0], source_id=None, session=session, consumer_token=None, access_token=None, token_filename=options.token_filename, user_query_twitter=False, logger=logger)
                 processor.process()
                 session.commit()
             logger.debug("main : %d tweet processed" % (i+1)) #@UndefinedVariable
--- a/script/utils/export_twitter_alchemy.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/utils/export_twitter_alchemy.py	Fri May 10 13:34:40 2013 +0200
@@ -366,7 +366,7 @@
                     username = None
                     profile_url = ""
                     if tw.user is not None:
-                        username = tw.user.name                    
+                        username = tw.user.screen_name
                         profile_url = tw.user.profile_image_url if tw.user.profile_image_url is not None else ""
                     if not username:
                         username = "anon."
@@ -416,7 +416,7 @@
                     get_logger().debug("write http " + repr(project)) #@UndefinedVariable
                     r = requests.put(content_file_write, data=anyjson.dumps(project), headers={'content-type':'application/json'}, params=post_param);
                     get_logger().debug("write http " + repr(r) + " content " + r.text) #@UndefinedVariable
-                    if r.status_code != requests.codes.ok:
+                    if r.status_code != requests.codes.ok:  # @UndefinedVariable
                         r.raise_for_status()
                 else:
                     if content_file_write and os.path.exists(content_file_write):
--- a/script/utils/get_stats.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/utils/get_stats.py	Fri May 10 13:34:40 2013 +0200
@@ -1,14 +1,13 @@
 
+from lxml import etree
 import httplib2
-import anyjson
-from lxml import etree
+import pprint
 import sys
-import pprint
 
 def get_stats(url):
     
     h = httplib2.Http()
-    resp, content = h.request(url)    
+    _, content = h.request(url)    
     #project = anyjson.deserialize(content)
     root = etree.fromstring(content)
 
--- a/script/utils/merge_tweets.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/utils/merge_tweets.py	Fri May 10 13:34:40 2013 +0200
@@ -1,12 +1,15 @@
 #from models import setup_database
 from iri_tweet.models import setup_database, TweetSource, Tweet, TweetLog
-from iri_tweet.utils import TwitterProcessor, get_oauth_token, show_progress
+from iri_tweet.processor import TwitterProcessorStatus
+from iri_tweet.utils import get_oauth_token, show_progress
+import anyjson
 import argparse
-import sys
+import codecs
+import logging
 import re
-import anyjson
-import math
-import codecs
+import sys
+
+logger = logging.getLogger(__name__)
 
 def get_option():
     
@@ -16,6 +19,10 @@
                         help="log to file", metavar="LOG", default="stderr")
     parser.add_argument("-v", dest="verbose", action="count",
                         help="verbose", default=0)
+    parser.add_option("-k", "--key", dest="consumer_key",
+                      help="Twitter consumer key", metavar="CONSUMER_KEY")
+    parser.add_option("-s", "--secret", dest="consumer_secret",
+                      help="Twitter consumer secret", metavar="CONSUMER_SECRET")
     parser.add_argument("-q", dest="quiet", action="count",
                         help="quiet", default=0)
     parser.add_argument("--query-user", dest="query_user", action="store_true",
@@ -38,7 +45,7 @@
     
     access_token = None
     if options.query_user:
-        access_token = get_oauth_token(options.token_filename)
+        access_token = get_oauth_token(options.consumer_key, options.consumer_secret, options.token_filename)
     
     #open source
     src_conn_str = options.source[0].strip()
@@ -60,7 +67,7 @@
         session_src = Session_src()
         session_tgt = Session_tgt()
         
-        count_tw_query = Tweet.__table__.count()
+        count_tw_query = Tweet.__table__.count()  # @UndefinedVariable
         
         count_tw = engine_src.scalar(count_tw_query)
         
@@ -83,23 +90,28 @@
                                 
                 tweet_obj = anyjson.deserialize(tweet_source)
                 if 'text' not in tweet_obj:
-                    tweet_log = TweetLog(tweet_source_id=source_id, status=TweetLog.TWEET_STATUS['NOT_TWEET'])
+                    tweet_log = TweetLog(tweet_source_id=tweet.tweet_source.id, status=TweetLog.TWEET_STATUS['NOT_TWEET'])
                     session_tgt.add(tweet_log)
                 else:                
-                    tp = TwitterProcessor(None, tweet_source, None, session_tgt, access_token, options.token_filename, user_query_twitter=options.query_user)
+                    tp = TwitterProcessorStatus(None, tweet_source, None, session_tgt, access_token, options.token_filename, user_query_twitter=options.query_user, logger=logger)
                     tp.process()
                 
                 session_tgt.flush()
                 
-            show_progress(i+1, count_tw, repr(progress_text+tweet.text), 70)
+            ptext = progress_text + tweet.text
+            show_progress(i+1, count_tw, ptext.replace("\n",""), 70)
                             
         session_tgt.commit()
         print u"%d new tweet added" % (added)
         
     finally:
-        session_tgt.close() if session_tgt is not None else None
-        session_src.close() if session_src is not None else None
-        conn_tgt.close() if conn_tgt is not None else None
-        conn_src.close() if conn_src is not None else None
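+        # Close whichever sessions/connections were successfully opened.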
+        if session_tgt is not None:
+            session_tgt.close()
+        if session_src is not None:
+            session_src.close()
+        if conn_tgt is not None:
+            conn_tgt.close()
+        if conn_src is not None:
+            conn_src.close()
         
         
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/script/utils/search_topsy.py	Fri May 10 13:34:40 2013 +0200
@@ -0,0 +1,168 @@
+from blessings import Terminal
+from iri_tweet import models, utils
+from iri_tweet.processor import TwitterProcessorStatus
+from optparse import OptionParser
+import logging
+import math
+import re
+import requests
+import time
+import twitter
+
+logger = logging.getLogger(__name__)
+
+APPLICATION_NAME = "Tweet recorder user"
+CONSUMER_KEY = "Vdr5ZcsjI1G3esTPI8yDg"
+CONSUMER_SECRET = "LMhNrY99R6a7E0YbZZkRFpUZpX5EfB1qATbDk1sIVLs"
+
+
+class TopsyResource(object):
+    
+    def __init__(self, query, **kwargs):
+                
+        self.options = kwargs
+        self.options['q'] = query
+        self.url = kwargs.get("url", "http://otter.topsy.com/search.json")
+        self.page = 0
+        self.req = None
+        self.res = {}
+        
+    def __initialize(self):
+        
+        params = {}
+        params.update(self.options)
+        self.req = requests.get(self.url, params=params)
+        self.res = self.req.json()
+        
+    def __next_page(self):
+        page = self.res.get("response").get("page") + 1
+        params = {}
+        params.update(self.options)
+        params['page'] = page
+        self.req = requests.get(self.url, params=params)
+        self.res = self.req.json()
+
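+    # Iterate lazily over every result page until the "list" field comes back empty.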
+    def __iter__(self):        
+        if not self.req:
+            self.__initialize()
+        while "response" in self.res and "list" in self.res.get("response") and self.res.get("response").get("list"):
+            for item in self.res.get("response").get("list"):
+                yield item
+            self.__next_page()
+            
+    def total(self):
+        if not self.res:
+            return 0
+        else:
+            return self.res.get("response",{}).get("total",0)
+            
+
+
+def get_option():
+    
+    parser = OptionParser()
+
+    parser.add_option("-d", "--database", dest="database",
+                      help="Input database", metavar="DATABASE")
+    parser.add_option("-Q", dest="query",
+                      help="query", metavar="QUERY")
+    parser.add_option("-t", dest="token_filename", metavar="TOKEN_FILENAME", default=".oauth_token",
+                      help="Token file name")
+    parser.add_option("-T", dest="topsy_apikey", metavar="TOPSY_APIKEY", default=None,
+                      help="Topsy apikey")
+    
+    utils.set_logging_options(parser)
+
+    return parser.parse_args()
+
+
+
+if __name__ == "__main__":
+
+    (options, args) = get_option()
+    
+    utils.set_logging(options)
+
+    access_token_key, access_token_secret = utils.get_oauth_token(consumer_key=CONSUMER_KEY, consumer_secret=CONSUMER_SECRET, token_file_path=options.token_filename, application_name=APPLICATION_NAME)
+
+    t = twitter.Twitter(domain="api.twitter.com", auth=twitter.OAuth(access_token_key, access_token_secret, CONSUMER_KEY, CONSUMER_SECRET), secure=True)
+    
+    conn_str = options.database.strip()
+    if not re.match("^\w+://.+", conn_str):
+        conn_str = 'sqlite:///' + conn_str
+    
+    engine, metadata, Session = models.setup_database(conn_str, echo=((options.verbose-options.quiet)>0), create_all=True)
+    session = None
+    
+    
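+    # Topsy search parameters: 100 results per page, all-time window ('a'), tweets only, including hidden ones.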
+    topsy_parameters = {
+        'apikey': options.topsy_apikey,
+        'perpage': 100,
+        'window': 'a',
+        'type': 'tweet',
+        'hidden': True,
+    }
+    
+    term = Terminal()
+    
+    try:
+        session = Session()
+        
+        print options.query
+
+        tr = TopsyResource(options.query, **topsy_parameters)
+        
+        move_up = 0
+        
+        for i,item in enumerate(tr):
+            # get id
+            url = item.get("url")
+            tweet_id = url.split("/")[-1]
+            
+            if move_up > 0:
+                print((move_up+1)*term.move_up())
+                move_up = 0
+            
+            print ("%d/%d:%03d%% - %s - %r" % (i+1, tr.total(), int(float(i+1)/float(tr.total())*100.0), tweet_id, item.get("content") ) + term.clear_eol())            
+            move_up += 1
+            
+            count_tweet = session.query(models.Tweet).filter_by(id_str=tweet_id).count()
+            
+            if count_tweet:
+                continue
+            try:                                    
+                tweet = t.statuses.show(id=tweet_id, include_entities=True)
+            except twitter.api.TwitterHTTPError as e:
+                if e.e.code == 404 or e.e.code == 403:
+                    continue
+                else:
+                    raise
+            
+            processor = TwitterProcessorStatus(tweet, None, None, session, None, options.token_filename, logger=logger)
+            processor.process()
+            session.flush()
+            session.commit()
+                        
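+            # Pace requests: spread the time until the rate limit resets evenly over the remaining quota.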
+            time_to_sleep = int(math.ceil((tweet.rate_limit_reset - time.mktime(time.gmtime())) / tweet.rate_limit_remaining))
+            
+            print "rate limit remaining %s of %s" % (str(tweet.rate_limit_remaining), str(tweet.headers.getheader('x-ratelimit-limit'))) + term.clear_eol()
+            move_up += 1
+            for j in xrange(time_to_sleep):
+                if j:
+                    print(2*term.move_up())
+                else:
+                    move_up += 1
+                print(("Sleeping for %d seconds, %d remaining" % (time_to_sleep, time_to_sleep-j)) + term.clear_eol())
+                time.sleep(1)
+                
+    except twitter.api.TwitterHTTPError as e:
+        fmt = ("." + e.format) if e.format else ""
+        print "Twitter sent status %s for URL: %s%s using parameters: (%s)\ndetails: %s" % (repr(e.e.code), repr(e.uri), repr(fmt), repr(e.uriparts), repr(e.response_data))
+        
+    finally:
+        if session:
+            session.close()
--- a/script/utils/tweet_twitter_user.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/utils/tweet_twitter_user.py	Fri May 10 13:34:40 2013 +0200
@@ -38,6 +38,7 @@
     parser.add_option("-t", dest="token_filename", metavar="TOKEN_FILENAME", default=".oauth_token",
                       help="Token file name")
     parser.add_option("-S", dest="simulate", metavar="SIMULATE", default=False, action="store_true", help="Simulate call to twitter. Do not change the database")
+    parser.add_option("--direct-message", dest="direct_message", metavar="DIRECT_MESSAGE", default=False, action="store_true", help="send direc t message to the user, else create a status update mentioning the user (@username)")
     parser.add_option("-f", dest="force", metavar="FORCE", default=False, action="store_true", help="force sending message to all user even if it has already been sent")
 
 
@@ -103,16 +104,25 @@
                 
             query_res = query.all()
             
-            acess_token_key, access_token_secret = get_oauth_token(options.token_filename, application_name=APPLICATION_NAME, consumer_key=CONSUMER_KEY, consumer_secret=CONSUMER_SECRET)
+            acess_token_key, access_token_secret = get_oauth_token(consumer_key=CONSUMER_KEY, consumer_secret=CONSUMER_SECRET, token_file_path=options.token_filename, application_name=APPLICATION_NAME)
             t = twitter.Twitter(auth=twitter.OAuth(acess_token_key, access_token_secret, CONSUMER_KEY, CONSUMER_SECRET))
 
             for user in query_res:
                 screen_name = user.screen_name
                 
-                message = u"@%s: %s" % (screen_name, base_message)
-                get_logger().debug("new status : " + message) #@UndefinedVariable
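+                # Direct messages are sent verbatim; public statuses get an @mention prefix.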
+                if options.direct_message:
+                    message = base_message
+                else:
+                    message = u"@%s: %s" % (screen_name, base_message)
+
+                print("new message : " + message)
+                get_logger().debug("new message : " + message) #@UndefinedVariable
+
                 if not options.simulate:
-                    t.statuses.update(status=message)
+                    if options.direct_message:
+                        t.direct_messages.new(user_id=user.id, screen_name=screen_name, text=message)
+                    else:
+                        t.statuses.update(status=message)
                     user_message = UserMessage(user_id=user.id, message_id=message_obj.id)
                     session.add(user_message)
                     session.flush()
--- a/script/virtualenv/res/credential.txt	Tue May 07 18:28:26 2013 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,20 +0,0 @@
-Consumer key
-54ThDZhpEjokcMgHJOMnQA
-
-Consumer secret
-wUoL9UL2T87tfc97R0Dff2EaqRzpJ5XGdmaN2XK3udA
-
-access_tokens:
-47312923-LiNTtz0I18YXMVIrFeTuhmH7bOvYsK6p3Ln2Dc
-
-access_secret:
-r3LoXVcjImNAElUpWqTu2SG2xCdWFHkva7xeQoncA
-
-Request token URL
-http://twitter.com/oauth/request_token
-
-Access token URL
-http://twitter.com/oauth/access_token
-
-Authorize URL
-http://twitter.com/oauth/authorize
\ No newline at end of file
--- a/script/virtualenv/res/lib/lib_create_env.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/virtualenv/res/lib/lib_create_env.py	Fri May 10 13:34:40 2013 +0200
@@ -16,26 +16,28 @@
 
 URLS = {
     #'': {'setup': '', 'url':'', 'local':''},
-    'DISTRIBUTE': {'setup': 'distribute', 'url':'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.32.tar.gz', 'local':"distribute-0.6.32.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
+    'DISTRIBUTE': {'setup': 'distribute', 'url':'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.34.tar.gz', 'local':"distribute-0.6.34.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
     'ANYJSON': {'setup': 'anyjson', 'url':'http://pypi.python.org/packages/source/a/anyjson/anyjson-0.3.3.tar.gz', 'local':"anyjson-0.3.3.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
     'OAUTH2': { 'setup': 'python-oauth2', 'url':"https://github.com/simplegeo/python-oauth2/tarball/hudson-python-oauth2-211", 'local':"python-oauth2-1.5-211.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-    'HTTPLIB2': { 'setup': 'httplib2', 'url':'http://pypi.python.org/packages/source/h/httplib2/httplib2-0.7.7.tar.gz', 'local':"httplib2-0.7.7.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
+    'HTTPLIB2': { 'setup': 'httplib2', 'url':'http://pypi.python.org/packages/source/h/httplib2/httplib2-0.8.tar.gz', 'local':"httplib2-0.8.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
     'LOCKFILE': {'setup': 'lockfile', 'url':'http://code.google.com/p/pylockfile/downloads/detail?name=lockfile-0.9.1.tar.gz', 'local':"lockfile-0.9.1.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
     'DAEMON': {'setup': 'python-daemon', 'url':'http://pypi.python.org/packages/source/p/python-daemon/python-daemon-1.5.5.tar.gz', 'local':"python-daemon-1.5.5.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
     'DATEUTIL': {'setup': 'python-dateutil', 'url':'http://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz', 'local':"python-dateutil-2.1.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-    'PYTZ': {'setup': 'pytz', 'url':'http://pypi.python.org/packages/source/p/pytz/pytz-2012h.tar.bz2', 'local':"pytz-2012h.tar.bz2", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-    'SIMPLEJSON': {'setup': 'simplejson', 'url':'http://pypi.python.org/packages/source/s/simplejson/simplejson-2.6.2.tar.gz', 'local':"simplejson-2.6.2.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-    'SQLALCHEMY': {'setup': 'sqlalchemy', 'url':'http://downloads.sourceforge.net/project/sqlalchemy/sqlalchemy/0.8.0b1/SQLAlchemy-0.8.0b1.tar.gz?r=http%3A%2F%2Fwww.sqlalchemy.org%2Fdownload.html&ts=1355091775&use_mirror=ignum', 'local':"SQLAlchemy-0.8.0b1.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-    'TWEEPY': {'setup': 'tweepy', 'url':'https://github.com/tweepy/tweepy/archive/1.12.tar.gz', 'local':"tweepy-1.12.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-    'TWITTER': {'setup': 'twitter', 'url':'http://pypi.python.org/packages/source/t/twitter/twitter-1.9.0.tar.gz', 'local':"twitter-1.9.0.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
+    'PYTZ': {'setup': 'pytz', 'url':'http://pypi.python.org/packages/source/p/pytz/pytz-2013b.tar.bz2', 'local':"pytz-2013b.tar.bz2", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
+    'SIMPLEJSON': {'setup': 'simplejson', 'url':'http://pypi.python.org/packages/source/s/simplejson/simplejson-3.1.3.tar.gz', 'local':"simplejson-3.1.3.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
+    'SQLALCHEMY': {'setup': 'sqlalchemy', 'url':'http://www.python.org/pypi/SQLAlchemy/0.8.1', 'local':"SQLAlchemy-0.8.1.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
+    'TWITTER': {'setup': 'twitter', 'url':'http://pypi.python.org/packages/source/t/twitter/twitter-1.9.2.tar.gz', 'local':"twitter-1.9.2.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
     'TWITTER-TEXT': {'setup': 'twitter-text', 'url':'https://github.com/dryan/twitter-text-py/archive/master.tar.gz', 'local':"twitter-text-1.0.4.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-    'REQUESTS': {'setup': 'requests', 'url':'https://github.com/kennethreitz/requests/archive/v1.1.0.tar.gz', 'local':'requests-v1.1.0.tar.gz', 'install' : {'method':'pip', 'option_str': None, 'dict_extra_env': None}},
+    'REQUESTS': {'setup': 'requests', 'url':'https://github.com/kennethreitz/requests/archive/v1.2.0.tar.gz', 'local':'requests-v1.2.0.tar.gz', 'install' : {'method':'pip', 'option_str': None, 'dict_extra_env': None}},
+    'OAUTHLIB': {'setup': 'oauthlib', 'url':'https://github.com/idan/oauthlib/archive/0.4.0.tar.gz', 'local':'oauthlib-0.4.0.tar.gz', 'install' : {'method':'pip', 'option_str': None, 'dict_extra_env': None}},
+    'REQUESTS-OAUTHLIB': {'setup': 'requests-oauthlib', 'url':'https://github.com/requests/requests-oauthlib/archive/master.tar.gz', 'local':'requests-oauthlib-0.3.0.tar.gz', 'install' : {'method':'pip', 'option_str': None, 'dict_extra_env': None}},
+    'BLESSINGS': {'setup': 'blessings', 'url':'https://github.com/erikrose/blessings/archive/1.5.tar.gz', 'local':'blessings-1.5.tar.gz', 'install' :  {'method':'pip', 'option_str': None, 'dict_extra_env': None}}
 }
 
 if system_str == 'Windows':
         
     URLS.update({
-        'PSYCOPG2': {'setup': 'psycopg2','url': 'psycopg2-2.4.5.win32-py2.7-pg9.1.3-release.zip', 'local':"psycopg2-2.4.5.win32-py2.7-pg9.1.3-release.zip", 'install': {'method': 'install_psycopg2', 'option_str': None, 'dict_extra_env': None}},
+        'PSYCOPG2': {'setup': 'psycopg2','url': 'psycopg2-2.5.win32-py2.7-pg9.2.4-release.zip', 'local':"psycopg2-2.5.win32-py2.7-pg9.2.4-release.zip", 'install': {'method': 'install_psycopg2', 'option_str': None, 'dict_extra_env': None}},
         'LXML': {'setup': 'lxml', 'url': 'http://pypi.python.org/packages/2.7/l/lxml/lxml-2.3-py2.7-win32.egg', 'local':"lxml-2.3-py2.7-win32.egg", 'install': {'method': 'easy_install', 'option_str': None, 'dict_extra_env': None}},
     })
 else:
@@ -47,8 +49,8 @@
         lxml_method = 'pip'
 
     URLS.update({
-        'PSYCOPG2': {'setup': 'psycopg2','url': 'http://www.psycopg.org/psycopg/tarballs/PSYCOPG-2-4/psycopg2-2.4.5.tar.gz', 'local':"psycopg2-2.4.5.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
-        'LXML': {'setup': 'lxml', 'url':"lxml-3.0.1.tar.gz", 'local':"lxml-3.0.1.tar.gz", 'install': {'method': lxml_method, 'option_str': None, 'dict_extra_env': lxml_options}},
+        'PSYCOPG2': {'setup': 'psycopg2','url': 'http://www.psycopg.org/psycopg/tarballs/PSYCOPG-2-5/psycopg2-2.5.tar.gz', 'local':"psycopg2-2.5.tar.gz", 'install': {'method': 'pip', 'option_str': None, 'dict_extra_env': None}},
+        'LXML': {'setup': 'lxml', 'url':"lxml-3.1.2.tar.gz", 'local':"lxml-3.1.2.tar.gz", 'install': {'method': lxml_method, 'option_str': None, 'dict_extra_env': lxml_options}},
     })
     
 
Binary file script/virtualenv/res/src/SQLAlchemy-0.8.0b1.tar.gz has changed
Binary file script/virtualenv/res/src/SQLAlchemy-0.8.1.tar.gz has changed
Binary file script/virtualenv/res/src/blessings-1.5.tar.gz has changed
Binary file script/virtualenv/res/src/distribute-0.6.32.tar.gz has changed
Binary file script/virtualenv/res/src/distribute-0.6.34.tar.gz has changed
Binary file script/virtualenv/res/src/httplib2-0.7.7.tar.gz has changed
Binary file script/virtualenv/res/src/httplib2-0.8.tar.gz has changed
Binary file script/virtualenv/res/src/lxml-3.0.1.tar.gz has changed
Binary file script/virtualenv/res/src/lxml-3.1.2.tar.gz has changed
Binary file script/virtualenv/res/src/oauthlib-0.4.0.tar.gz has changed
Binary file script/virtualenv/res/src/psycopg2-2.4.5.tar.gz has changed
Binary file script/virtualenv/res/src/psycopg2-2.5.tar.gz has changed
Binary file script/virtualenv/res/src/psycopg2-2.5.win32-py2.7-pg9.2.4-release.zip has changed
Binary file script/virtualenv/res/src/pytz-2012h.tar.bz2 has changed
Binary file script/virtualenv/res/src/pytz-2013b.tar.bz2 has changed
Binary file script/virtualenv/res/src/requests-1.1.0.tar.gz has changed
Binary file script/virtualenv/res/src/requests-oauthlib-0.3.0.tar.gz has changed
Binary file script/virtualenv/res/src/requests-v1.2.0.tar.gz has changed
Binary file script/virtualenv/res/src/simplejson-2.6.2.tar.gz has changed
Binary file script/virtualenv/res/src/simplejson-3.1.3.tar.gz has changed
Binary file script/virtualenv/res/src/tweepy-1.12.tar.gz has changed
Binary file script/virtualenv/res/src/twitter-1.9.0.tar.gz has changed
Binary file script/virtualenv/res/src/twitter-1.9.2.tar.gz has changed
--- a/script/virtualenv/script/res/res_create_env.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/virtualenv/script/res/res_create_env.py	Fri May 10 13:34:40 2013 +0200
@@ -17,10 +17,12 @@
     'DATEUTIL',
     'SIMPLEJSON',
     'SQLALCHEMY',
-    'TWEEPY',
     'TWITTER',
     'TWITTER-TEXT',
-    'REQUESTS',    
+    'REQUESTS',
+    'OAUTHLIB',
+    'REQUESTS-OAUTHLIB',
+    'BLESSINGS'
 ]
 
 if system_str == "Linux":
--- a/script/virtualenv/script/virtualenv.py	Tue May 07 18:28:26 2013 +0200
+++ b/script/virtualenv/script/virtualenv.py	Fri May 10 13:34:40 2013 +0200
@@ -4,7 +4,7 @@
 
 # If you change the version here, change it in setup.py
 # and docs/conf.py as well.
-__version__ = "1.8.4"  # following best practices
+__version__ = "1.9.1"  # following best practices
 virtualenv_version = __version__  # legacy, again
 
 import base64
@@ -862,6 +862,19 @@
         'VIRTUALENV_DISTRIBUTE to make it the default ')
 
     parser.add_option(
+        '--no-setuptools',
+        dest='no_setuptools',
+        action='store_true',
+        help='Do not install distribute/setuptools (or pip) '
+        'in the new virtualenv.')
+
+    parser.add_option(
+        '--no-pip',
+        dest='no_pip',
+        action='store_true',
+        help='Do not install pip in the new virtualenv.')
+
+    parser.add_option(
         '--setuptools',
         dest='use_distribute',
         action='store_false',
@@ -961,7 +974,9 @@
                        use_distribute=options.use_distribute,
                        prompt=options.prompt,
                        search_dirs=options.search_dirs,
-                       never_download=options.never_download)
+                       never_download=options.never_download,
+                       no_setuptools=options.no_setuptools,
+                       no_pip=options.no_pip)
     if 'after_install' in globals():
         after_install(options, home_dir)
 
@@ -1048,7 +1063,8 @@
 
 def create_environment(home_dir, site_packages=False, clear=False,
                        unzip_setuptools=False, use_distribute=False,
-                       prompt=None, search_dirs=None, never_download=False):
+                       prompt=None, search_dirs=None, never_download=False,
+                       no_setuptools=False, no_pip=False):
     """
     Creates a new environment in ``home_dir``.
 
@@ -1066,14 +1082,16 @@
 
     install_distutils(home_dir)
 
-    if use_distribute:
-        install_distribute(py_executable, unzip=unzip_setuptools,
-                           search_dirs=search_dirs, never_download=never_download)
-    else:
-        install_setuptools(py_executable, unzip=unzip_setuptools,
-                           search_dirs=search_dirs, never_download=never_download)
+    if not no_setuptools:
+        if use_distribute:
+            install_distribute(py_executable, unzip=unzip_setuptools,
+                               search_dirs=search_dirs, never_download=never_download)
+        else:
+            install_setuptools(py_executable, unzip=unzip_setuptools,
+                               search_dirs=search_dirs, never_download=never_download)
 
-    install_pip(py_executable, search_dirs=search_dirs, never_download=never_download)
+        if not no_pip:
+            install_pip(py_executable, search_dirs=search_dirs, never_download=never_download)
 
     install_activate(home_dir, bin_dir, prompt)
 
@@ -1189,8 +1207,9 @@
             else:
                 if f is not None:
                     f.close()
-                # special-case custom readline.so on OS X:
-                if modname == 'readline' and sys.platform == 'darwin' and not filename.endswith(join('lib-dynload', 'readline.so')):
+                # special-case custom readline.so on OS X, but not for pypy:
+                if modname == 'readline' and sys.platform == 'darwin' and not (
+                        is_pypy or filename.endswith(join('lib-dynload', 'readline.so'))):
                     dst_filename = join(dst_prefix, 'lib', 'python%s' % sys.version[:3], 'readline.so')
                 else:
                     dst_filename = change_prefix(filename, dst_prefix)
@@ -1351,11 +1370,6 @@
     if sys.executable != py_executable:
         ## FIXME: could I just hard link?
         executable = sys.executable
-        if is_cygwin and os.path.exists(executable + '.exe'):
-            # Cygwin misreports sys.executable sometimes
-            executable += '.exe'
-            py_executable += '.exe'
-            logger.info('Executable actually exists in %s' % executable)
         shutil.copyfile(executable, py_executable)
         make_exe(py_executable)
         if is_win or is_cygwin:
@@ -1901,143 +1915,146 @@
 
 ##file site.py
 SITE_PY = convert("""
-eJzFPf1z2zaWv/OvwMqToeTKdOJ0OztO3RsncVrvuYm3SWdz63q0lARZrCmSJUjL6s3d337vAwAB
-kpLtTXdO04klEnh4eHhfeHgPHQwGp0Uhs7lY5fM6lULJuJwtRRFXSyUWeSmqZVLOD4q4rDbwdHYb
-30glqlyojYqwVRQE+1/4CfbFp2WiDArwLa6rfBVXySxO041IVkVeVnIu5nWZZDciyZIqidPkd2iR
-Z5HY/3IMgvNMwMzTRJbiTpYK4CqRL8TlplrmmRjWBc75RfTn+OVoLNSsTIoKGpQaZ6DIMq6CTMo5
-oAktawWkTCp5oAo5SxbJzDZc53U6F0Uaz6T45z95atQ0DAOVr+R6KUspMkAGYEqAVSAe8DUpxSyf
-y0iI13IW4wD8vCFWwNDGuGYKyZjlIs2zG5hTJmdSqbjciOG0rggQoSzmOeCUAAZVkqbBOi9v1QiW
-lNZjDY9EzOzhT4bZA+aJ43c5B3D8kAU/Z8n9mGED9yC4aslsU8pFci9iBAs/5b2cTfSzYbIQ82Sx
-ABpk1QibBIyAEmkyPSxoOb7VK/TdIWFluTKGMSSizI35JfWIgvNKxKkCtq0LpJEizN/KaRJnQI3s
-DoYDiEDSoG+ceaIqOw7NTuQAoMR1rEBKVkoMV3GSAbP+GM8I7b8n2TxfqxFRAFZLiV9rVbnzH/YQ
-AFo7BBgHuFhmNessTW5luhkBAp8A+1KqOq1QIOZJKWdVXiZSEQBAbSPkPSA9FnEpNQmZM43cjon+
-RJMkw4VFAUOBx5dIkkVyU5ckYWKRAOcCV7z78JN4e/b6/PS95jEDjGX2ZgU4AxRaaAcnGEAc1qo8
-THMQ6Ci4wD8ins9RyG5wfMCraXD44EoHQ5h7EbX7OAsOZNeLq4eBOVagTGisgPr9N3QZqyXQ538e
-WO8gON1GFZo4f1svc5DJLF5JsYyZv5Azgm81nO+iolq+Am5QCKcCUilcHEQwQXhAEpdmwzyTogAW
-S5NMjgKg0JTa+qsIrPA+zw5orVucABDKIIOXzrMRjZhJmGgX1ivUF6bxhmammwR2nVd5SYoD+D+b
-kS5K4+yWcFTEUPxtKm+SLEOEkBeCcC+kgdVtApw4j8QFtSK9YBqJkLUXt0SRqIGXkOmAJ+V9vCpS
-OWbxRd26W43QYLISZq1T5jhoWZF6pVVrptrLe0fR5xbXEZrVspQAvJ56QrfI87GYgs4mbIp4xeJV
-rXPinKBHnqgT8gS1hL74HSh6qlS9kvYl8gpoFmKoYJGnab4Gkh0HgRB72MgYZZ854S28g38BLv6b
-ymq2DAJnJAtYg0Lkt4FCIGASZKa5WiPhcZtm5baSSTLWFHk5lyUN9ThiHzLij2yMcw3e55U2ajxd
-XOV8lVSokqbaZCZs8bKwYv34iucN0wDLrYhmpmlDpxVOLy2W8VQal2QqFygJepFe2WWHMYOeMckW
-V2LFVgbeAVlkwhakX7Gg0llUkpwAgMHCF2dJUafUSCGDiRgGWhUEfxWjSc+1swTszWY5QIXE5nsG
-9gdw+x3EaL1MgD4zgAAaBrUULN80qUp0EBp9FPhG3/Tn8YFTzxfaNvGQizhJtZWPs+CcHp6VJYnv
-TBbYa6yJoWCGWYWu3U0GdEQxHwwGQWDcoY0yX3MVVOXmGFhBmHEmk2mdoOGbTNDU6x8q4FGEM7DX
-zbaz8EBDmE7vgUpOl0WZr/C1ndtHUCYwFvYI9sQlaRnJDrLHia+QfK5KL0xTtN0OOwvUQ8HlT2fv
-zj+ffRQn4qpRaeO2PruGMc+yGNiaLAIwVWvYRpdBS1R8Ceo+8Q7MOzEF2DPqTeIr46oG3gXUP5U1
-vYZpzLyXwdn709cXZ5OfP579NPl4/ukMEAQ7I4M9mjKaxxocRhWBcABXzlWk7WvQ6UEPXp9+tA+C
-SaImxabYwAMwlMDC5RDmOxYhPpxoGzxJskUejqjxr+yEn7Ba0R7X1fHX1+LkRIS/xndxGIDX0zTl
-RfyRBODTppDQtYI/w1yNgmAuFyAstxJFarhPnuyIOwARoWWuLeuveZKZ98xH7hAk8UPqAThMJrM0
-VgobTyYhkJY69HygQ8TuMMrJEDoWG7frSKOCn1LCUmTYZYz/9KAYT6kfosEoul1MIxCw1SxWklvR
-9KHfZIJaZjIZ6gFB/IjHwUVixREK0wS1TJmAJ0q8glpnqvIUfyJ8lFsSGdwMoV7DRdKbneguTmup
-hs6kgIjDYYuMqBoTRRwETsUQbGezdKNRm5qGZ6AZkC/NQe+VLcrhZw88FFAwZtuFWzPeLTHNENO/
-8t6AcAAnMUQFrVQLCuszcXl2KV4+PzpABwR2iXNLHa852tQkq6V9uIDVupGVgzD3CsckDCOXLgvU
-jPj0eDfMVWRXpssKC73EpVzld3IO2CIDO6ssfqI3sJeGecxiWEXQxGTBWekZTy/GnSPPHqQFrT1Q
-b0VQzPqbpd/j7bvMFKgO3goTqfU+nY1XUeZ3CboH041+CdYN1BvaOOOKBM7CeUyGRgw0BPitGVJq
-LUNQYGXNLibhjSBRw88bVRgRuAvUrdf09TbL19mE964nqCaHI8u6KFiaebFBswR74h3YDUAyh61Y
-QzSGAk66QNk6AORh+jBdoCztBgAQmGZFGywHltmc0RR5n4fDIozRK0HCW0q08HdmCNocGWI4kOht
-ZB8YLYGQYHJWwVnVoJkMZc00g4EdkvhcdxHxptEH0KJiBIZuqKFxI0O/q2NQzuLCVUpOP7Shnz9/
-ZrZRS4qIIGJTnDQa/QWZt6jYgClMQCcYH4rjK8QGa3BHAUytNGuKg48iL9h/gvW81LINlhv2Y1VV
-HB8ertfrSMcD8vLmUC0O//yXb775y3PWifM58Q9Mx5EWHRyLDukd+qDRt8YCfWdWrsWPSeZzI8Ea
-SvKjyHlE/L6vk3kujg9GVn8iFzeGFf81zgcokIkZlKkMtB00GD1TB8+il2ognomh23Y4Yk9Cm1Rr
-xXyrCz2qHGw3eBqzvM6q0FGkSnwF1g321HM5rW9CO7hnI80PmCrK6dDywMGLa8TA5wzDV8YUT1BL
-EFugxXdI/xOzTUz+jNYQSF40UZ397qZfixnizh8v79Y7dITGzDBRyB0oEX6TRwugbdyVHPxoZxTt
-nuOMmo9nCIylDwzzaldwiIJDuOBajF2pc7gafVSQpjWrZlAwrmoEBQ1u3ZSprcGRjQwRJHo3ZnvO
-C6tbAJ1asT6zozerAC3ccTrWrs0KjieEPHAiXtATCU7tcefdc17aOk0pBNPiUY8qDNhbaLTTOfDl
-0AAYi0H584Bbmo3Fh9ai8Br0AMs5aoMMtugwE75xfcDB3qCHnTpWf1tvpnEfCFykIUePHgWdUD7h
-EUoF0lQM/Z7bWNwStzvYTotDTGWWiURabRGutvLoFaqdhmmRZKh7nUWKZmkOXrHVisRIzXvfWaCd
-Cz7uM2ZaAjUZGnI4jU7I2/MEMNTtMOB1U2NowI2cIEarRJF1QzIt4R9wKygiQeEjoCVBs2AeK2X+
-xP4AmbPz1V+2sIclNDKE23SbG9KxGBqOeb8nkIw6fgJSkAEJu8JIriOrgxQ4zFkgT7jhtdwq3QQj
-UiBnjgUhNQO400tvg4NPIjyzIAlFyPeVkoX4Sgxg+dqi+jjd/YdyqQkbDJ0G5CroeMOJG4tw4hAn
-rbiEz9B+RIJON4ocOHgKLo8bmnfZ3DCtDZOAs+4rbosUaGSKnAxGLqrXhjBu+PdPJ06LhlhmEMNQ
-3kDeIYwZaRTY5dagYcENGG/N22Ppx27EAvsOw1wdydU97P/CMlGzXIW4we3ELtyP5ooubSy2F8l0
-AH+8BRiMrj1IMtXxC4yy/AuDhB70sA+6N1kMi8zjcp1kISkwTb8Tf2k6eFhSekbu8CNtpw5hohij
-PHxXgoDQYeUhiBNqAtiVy1Bpt78LducUBxYudx94bvPV8cvrLnHH2yI89tO/VGf3VRkrXK2UF42F
-Alera8BR6cLk4myjjxv1cTRuE8pcwS5SfPj4WSAhOBK7jjdPm3rD8IjNg3PyPgZ10GsPkqs1O2IX
-QAS1IjLKYfh0jnw8sk+d3I6JPQHIkxhmx6IYSJpP/hU4uxYKxjiYbzKMo7VVBn7g9TdfT3oioy6S
-33w9eGCUFjH6xH7Y8gTtyJQGIHqnbbqUMk7J13A6UVQxa3jHtilGrNBp/6eZ7LrH6dR4UTwzvlfJ
-71J8J472918e9bfFj4GH8XAJ7sLzcUPB7qzx43tWW+Fpk7UDWGfjaj57NAXY5ufTX2GzrHR87S5O
-UjoUADIcHKCeNft8Dl30KxIP0k5d45Cgbyumrp4DY4QcWBh1p6P9slMTe+7ZEJtPEasuKns6AaA5
-v/IO9d2zyy5UveyGh5/zScNRj5byZtznV3yJhsXPH6KMLDCPBoM+sm9lx/+PWT7/90zykVMxx85/
-oGF8IqA/aiZsRxiatiM+rP5ld02wAfYIS7XFA93hIXaH5oPGhfHzWCUpsY+6a1+sKdeAwqx4aARQ
-5uwC9sDBZdQn1m/qsuRzZ1KBhSwP8Cx1LDDNyjiBlL3VBXP4XlaIiW02o7C1k5ST96mRUAei7UzC
-ZgvRL2fL3ISvZHaXlNAXFO4w/OHDj2dhvwnBkC50erwVebwLgXCfwLShJk74lD5Moad0+delqr2L
-8QlqjvNNcFiTrdc++DFhE1LoX4MHgkPe2S2fkeNmfbaUs9uJpHN/ZFPs6sTH3+BrxMSmA/jJWype
-UAYazGSW1kgr9sExdXBRZzM6KqkkuFo6zxfzfug0nyOBizS+EUPqPMcolOZGClTdxaV2RIsyx8xS
-USfzw5tkLuRvdZziDl8uFoALnmPpVxEPT8Eo8ZYTEjjjUMlZXSbVBkgQq1wfA1LugtNwuuGJDj0k
-+cSHCYjZDMfiI04b3zPh5oZcJk7gH37gJHELjh3MOS1yFz2H91k+wVEnlKA7ZqS6R/T0OGiPkAOA
-AQCF+Q9GOojnv5H0yj1rpDV3iYpa0iOlG3TIyRlDKMMRBj34N/30GdHlrS1Y3mzH8mY3ljdtLG96
-sbzxsbzZjaUrEriwNn5lJKEvhtU+4ehNlnHDTzzMWTxbcjtM3MQETYAoCrPXNjLF+ctekIuP+ggI
-qW3n7JkeNskvCWeEljlHwzVI5H48z9L7epN57nSmVBrdmadi3NltCUB+38MoojyvKXVneZvHVRx5
-cnGT5lMQW4vuuAEwFu1cIA6bZneTKQd6W5ZqcPlfn3748B6bI6iByXSgbriIaFhwKsP9uLxRXWlq
-9oEFsCO19HNyqJsGuPfIIBuPssf/vKVkD2QcsaZkhVwU4AFQSpZt5iYuhWHruc5w0s+Zyfnc6UQM
-smrQTGoLkU4vL9+efjodUPRv8L8DV2AMbX3pcPExLWyDrv/mNrcUxz4g1DrM1Rg/d04erRuOeNjG
-GrAdH7714OgxBrs3YuDP8t9KKVgSIFSk48BPIdSj90BftE3o0McwYidzzz1kY2fFvnNkz3FRHNHv
-O4FoD+Cfe+IeYwIE0C7U0OwMms1US+lb87qDog7QR/p6X7wFa2+92jsZn6J2Ej0OoENZ22y7++cd
-2bDRU7J6ffb9+fuL89eXp59+cFxAdOU+fDw8Emc/fhaUKoIGjH2iGLMkKkxKAsPiVimJeQ7/1Rj5
-mdcVx4uh19uLC31os8I6FUxcRpsTwXPOaLLQOHzGAWn7UKciIUap3iA5BUGUuUMFQ7hfWnExisp1
-cjPVGU3RWa311ksXepmCMDrijkD6oLFLCgbB2WbwilLQK7MrLPkwUBdJ9SClbbTNEUkpPNjJHHCO
-wsxBixczpc7wpOmsFf1V6OIaXkeqSBPYyb0KrSzpbpgp0zCOfmjPuhmvPg3odIeRdUOe9VYs0Gq9
-Cnluuv+oYbTfasCwYbC3MO9MUqYIpU9jnpsIsREf6oTyHr7apddroGDB8MyvwkU0TJfA7GPYXItl
-AhsI4MklWF/cJwCE1kr4ZwPHTnRA5pioEb5ZzQ/+FmqC+K1/+aWneVWmB/8QBeyCBGcVhT3EdBu/
-hY1PJCNx9uHdKGTkKEtX/K3G3H5wSCgA6kg7pTLxYfpkqGS60Kkmvj7AF9pPoNet7qUsSt293zUO
-UQKeqSF5Dc+UoV+ImV8W9hinMmqBxrIFixmW/7kZCeazJz4uZZrqZPXztxdn4DtiJQVKEB/BncFw
-HC/B03Sdh8fliS1QeNYOr0tk4xJdWMq3mEdes96gNYoc9fZSNOw6UWC426sTBS7jRLloD3HaDMvU
-AkTIyrAWZlmZtVttkMJuG6I4ygyzxOSypFxWnyeAl+lpzFsi2CthnYaJwPOBcpJVJnkxTWagR0Hl
-gkIdg5AgcbEYkTgvzzgGnpfK1DDBw2JTJjfLCs85oHNE9RPY/MfTzxfn76mm4Ohl43X3MOeYdgJj
-zic5wWxBjHbAFzcDELlqMunjWf0KYaD2gT/tV5yocsIDdPpxYBH/tF9xEdmJsxPkGYCCqou2eOAG
-wOnWJzeNLDCudh+MHzcbsMHMB0OxSKxZ0Tkf7vy6nGhbtkwJxX3Myycc4CwKm52mO7vZae2PnuOi
-wBOv+bC/Ebztky3zmULX286bbXlw7qcjhVjPChh1W/tjmESxTlM9HYfZtnELbWu1jf0lc2KlTrtZ
-hqIMRBy6nUcuk/UrYd2cOdDLqO4AE99qdI0k9qrywS/ZQHsYHiaW2J19iulIFS1kBDCSIXXhTg0+
-FFoEUCCUCDx0JHc82j/y5uhYg4fnqHUX2MYfQBHqtFwq98hL4ET48hs7jvyK0EI9eixCx1PJZJbb
-lDH8rJfoVb7w59grAxTERLEr4+xGDhnW2MD8yif2lhAsaVuP1FfJdZ9hEefgnN5v4fCuXPQfnBjU
-WozQaXcrN2115JMHG/RWhewkmA++jNeg+4u6GvJKbjmH7i2E2w71YYiYiAhN9Tn8MMRwzG/hlvVp
-APdSQ8NCD++3LaewvDbGkbX2sVXgFNoX2oOdlbA1qxQdyziVhcYXtV5AY3BPGpM/sE91zpD93VMy
-5sSELFAe3AXpzW2gG7TCCQOuXOKyz4Qy45vCGv1uLu9kCkYDjOwQCx9+tYUPo8iGU3pTwr4Yu8vN
-5aYfN3rTYHZsKjPQM1MFrF+UyeoQ0emN+OzCrEEGl/oXvSWJs1vykt/8/Xws3rz/Cf59LT+AKcXK
-xbH4B6Ah3uQl7C+59JbuRMCijoo3jnmtsLyRoNFRBV8fgW7bpUdnPBbR1SZ+mYnVlAITbMsV31kC
-KPIEqRy98RNMDQX8NkVeLW/UeIp9izLQL5EG2+tesFbkULeMltUqRXvhREma1bwaXJy/OXv/8Syq
-7pHDzc+BE0Xxc7NwOvqMuMTzsLGwT2Y1Prl2HOcfZFr0+M1602lqaHDTKULYlxR2o8n3YcR2cxGX
-GDkQxWaezyJsCSzPZXvVGhzpkbO/fNDQe1YWYQ1H+hSt8ebxMVBD/NJWRANoSH30nKgnIRRPsX6M
-H0eDflM8FhTahj/7t+u5GxnXhUA0wTamzayHfnerC5dMZw3PchLhdWKXwdSGpkmsVtOZWzP4IRP6
-OhPQcnTOIRdxnVZCZiC5tMmneyVA07tlfiwhzCpszqj2jcI06TreKCcJKVZigKMOqDQeD2QoYgh7
-8B/jW7YHWH8oai5kBuiEKO2fcqerqmdLlmDeEhH1ehIP1kn20s3n0RTmQXmHPGscWZgnuo2M0Y2s
-9Pz5wXB09aLJdKCo9Mwr8p0VYPVcNtkD1Vns7+8PxH887P0wKlGa57fglgHsXq/lgl5vsdx6cna1
-up69eRMBP86W8goeXFP03D6vMwpN7uhKCyLtXwMjxLUJLTOa9i27zEG7kg+auQUfWGnL8XOW0KVF
-GFqSqGz13U8YdjLSRCwJiiGM1SxJQg5TwHps8hrr8zDMqPlF3gPHJwhmjG/xhIy32kv0MCmX1nKP
-RedEDAjwgHLLeDQqcKYKNcBzcrnRaE7Os6RqSkueu4enupC/sncRab4S8Rolw8yjRQyn1NNj1cbD
-zneyqLdjyWdXbsCxNUt+/RDuwNogafliYTCFh2aRZrksZ8ac4ools6RywJh2CIc70xVMZH2ioAel
-Aah3sgpzK9H27Z/suriYfqBz5AMzkk4fquy1VhwcirNWgmEUNeNTGMoS0vKt+TKCUd5TWFt7At5Y
-4k86qIp1Bd7tG26JY53pWzU4f6O5agPg0E1OVkFadvR0hHN9mIXPTLvlLgz80BadcLtLyqqO04m+
-vGGCDtvEHqxrPG1p3M6iT+utgJOfgwd8oLP4wXEwWTZIT0zCNVUaJ2KhQxSRW23mF2YVOXp5R+wr
-gU+BlJlPTI20CSJdWXa1xac6Z9NR8QjqK1PQtMUzN5U0nSIUF/Mx5TmZEogtXrTBpX2nhfjuRAxf
-jMWfWxuhWbHBW5kA5Wfz6Nk89H0y6np1fNTYme7GswVhK5CX10+ebppMaXphX/r5w3110iFuAFcg
-O4tEzg+eKcSOcf5SqBpKM6/tnEIzxur0PZv1pAuzm3IVqkqbgle/bhSKo1qM/2kHMRXfWg9wcSwK
-LVsgW9BvEk9ayX/20jVMDNTo+SuLnsuk73AKv+HFKfBeE9R1dLYeWuoMewu2Z0+uyyj5CKpp2HD8
-gx7Vk0SpnSPeaYXHk43Euaz/BB4O6ZIZYpqvWsfC/07m4aT9bYeLHSy/+XoXnq6C6a2Y6FnQx1Yx
-8KK3SxeahTef/qCXxzJ9Xf940dkqGE9d/kdkBTwsZY+XsF3S9WQq6V79tMING6ZLL2N+g4a3Lo5t
-QMMoHjxwGrpJdPipbnsrf1jpoAaubsNd0+f+u+auWwR25uYMuTN3v8LPpYHuu51f+mjAm0lNiEdl
-pjdqoV/juMpirFMX+gOj+oPkdzvhTLfonofAmEQJDLMSm2rsjW1YxTP3O+bhHPAltm5BZ69Fak27
-o1jaHP8Yc8I5B/jc1nhTIslccyB7p3Qr2YRTEyfy5kZNYrwRb0JbGkqj6fiqxkl+RxeayVhtjG+L
-18YACMNNOuHRzWkGxoBtE9/My1kozv0ggoamXE0n+VMlc45TaUcawEUcn6L+Jv7J2ZuDVGJYUdVl
-UcLeY6Dvb+X0iL6M0gaoCZesYnVrUDc9xvo6TxyCc3JMEShHxXg/41EHCME63rmcitOhJxP7Dvjl
-eVPsnowtQ8isXskyrpqLXvzD2ATsSzMClf7iAjsBkUYyW5ziIpZY/nCQwpCE/f6VduW9rcyOCveR
-1XqPZyvqoQNtTymed2yP4ebk3l705l4wNKdrgV1XwjZruM9ebgNLYW4tI12pIxT8Vt+kxPdzcvwU
-nRGHj0Du3cI3PwnYqjV2hSwazjNXMXSvzsHabbLFfTfidbige/ddaztjx/f1hmWWjhOypbGlonbg
-ehVPM9qo2bdjvt4D+3Y/J/uJ+3YP/iP37fr+QjA4Gh+tD3qztB/Y4LOacC8DbBgB+kyASHh+2LpK
-zpjMoZvzDJvr5H5gL+NlnekUUhkzgRzZvSWKQPClf8pNEPUu5dq1b/elix5/f/Hh9ekF0WJyefrm
-P0+/p5wYDFK3bNajAxtZfsDUPvCyb90gh85j6Bu8wbbndk0uIdEQOu87R8A9EPrLhfoWtK3I3Nfb
-OnTKLrqdAPHd025B3aayeyF3/DOd4u9mL7TSZAP9lHMazS/nYNg8MucjLA7N+Yd534SstYx2Inrb
-Fs7JLuyqE+236vsYt0QbRzbHlVYAI9XIXzZQbQoWbDiUHZX2/yKBEnOx2MvcZQJSOJPOnXp0nR6D
-qvz/F0MJyi7G0zZ2GMf2XmNqx0F5ZS/sxhO3mYwMQbxqq0F3fq6wz2W6hQpBwApP3xjHiBj9p4+x
-7KHvMyWuDqiu8wCVzbX9hWumndy/J3i0W9mblxTnh/DhFjRe1Kl7XGv7dDqQ80dnAPnCKSQAzXcI
-dG7EUwF7o8/ECnG6ESFsJPWxJOYmEh31tWkO8mg3HewNrZ6Lg21Vf27VmxAvtjectwrrdI8j7qEe
-6KFqU1vlWGBMkttWzie+I8h8iCToqiXP+cCTS33DL3y9u3pxbEO6yO/42lEklMwzcAz7lZMMt/N6
-P6c7MUs5pmwp3LM5xaC6xbUDlX2CbXucTkXAln2QOV1mSAPvfX9UxfTwri0ftDG1rHcMUxLDZ2pE
-03JqKDTu9smoO91GbXWBcD3II4B0VCDAQjAd3ejk5204yXb4XO8KpzVdjOrG9UNHKihXx+cI7mF8
-vwa/dneq43xUd0bR9OcGbQ7USw7Czb4Dtxp5IZHtJqE99YYPtrgAXBLb3//FI/p3s8hs96NdfrVt
-9bK3DIt9WUw8xHyMFonM4wiMDOjNIWlrzFY3go63gDR0dBmqmRvyBTp+lMyI1x7TBoOc2Yn2AKxR
-CP4PNIke9w==
+eJzFPf1z2zaWv/OvwMqToZTIdOK0vR2nzo2TOK3v3MTbpLO5dT1aSoIs1hTJEqRl7c3d337vAwAB
+kpLtTXdO04klEnh4eHhfeHgPHQwGJ0Uhs7lY5fM6lULJuJwtRRFXSyUWeSmqZVLO94u4rDbwdHYT
+X0slqlyojYqwVRQET7/yEzwVn5eJMijAt7iu8lVcJbM4TTciWRV5Wcm5mNdlkl2LJEuqJE6Tf0CL
+PIvE06/HIDjLBMw8TWQpbmWpAK4S+UJcbKplnolhXeCcX0Tfxi9HY6FmZVJU0KDUOANFlnEVZFLO
+AU1oWSsgZVLJfVXIWbJIZrbhOq/TuSjSeCbF3//OU6OmYRiofCXXS1lKkQEyAFMCrALxgK9JKWb5
+XEZCvJGzGAfg5w2xAoY2xjVTSMYsF2meXcOcMjmTSsXlRgyndUWACGUxzwGnBDCokjQN1nl5o0aw
+pLQea3gkYmYPfzLMHjBPHL/LOYDjxyz4JUvuxgwbuAfBVUtmm1IukjsRI1j4Ke/kbKKfDZOFmCeL
+BdAgq0bYJGAElEiT6UFBy/G9XqHXB4SV5coYxpCIMjfml9QjCs4qEacK2LYukEaKMH8np0mcATWy
+WxgOIAJJg75x5omq7Dg0O5EDgBLXsQIpWSkxXMVJBsz6UzwjtP+aZPN8rUZEAVgtJX6rVeXOf9hD
+AGjtEGAc4GKZ1ayzNLmR6WYECHwG7Eup6rRCgZgnpZxVeZlIRQAAtY2Qd4D0WMSl1CRkzjRyOyb6
+E02SDBcWBQwFHl8iSRbJdV2ShIlFApwLXPH+48/i3embs5MPmscMMJbZ6xXgDFBooR2cYABxUKvy
+IM1BoKPgHP+IeD5HIbvG8QGvpsHBvSsdDGHuRdTu4yw4kF0vrh4G5liBMqGxAur339BlrJZAn/+5
+Z72D4GQbVWji/G29zEEms3glxTJm/kLOCL7XcF5HRbV8BdygEE4FpFK4OIhggvCAJC7NhnkmRQEs
+liaZHAVAoSm19VcRWOFDnu3TWrc4ASCUQQYvnWcjGjGTMNEurFeoL0zjDc1MNwnsOq/ykhQH8H82
+I12UxtkN4aiIofjbVF4nWYYIIS8E4V5IA6ubBDhxHolzakV6wTQSIWsvbokiUQMvIdMBT8q7eFWk
+cszii7p1txqhwWQlzFqnzHHQsiL1SqvWTLWX9w6jLy2uIzSrZSkBeD31hG6R52MxBZ1N2BTxisWr
+WufEOUGPPFEn5AlqCX3xO1D0RKl6Je1L5BXQLMRQwSJP03wNJDsKAiH2sJExyj5zwlt4B/8CXPw3
+ldVsGQTOSBawBoXIbwOFQMAkyExztUbC4zbNym0lk2SsKfJyLksa6mHEPmDEH9gY5xp8yCtt1Hi6
+uMr5KqlQJU21yUzY4mVhxfrxFc8bpgGWWxHNTNOGTiucXlos46k0LslULlAS9CK9sssOYwY9Y5It
+rsSKrQy8A7LIhC1Iv2JBpbOoJDkBAIOFL86Sok6pkUIGEzEMtCoI/ipGk55rZwnYm81ygAqJzfcM
+7A/g9g8Qo/UyAfrMAAJoGNRSsHzTpCrRQWj0UeAbfdOfxwdOPVto28RDLuIk1VY+zoIzenhaliS+
+M1lgr7EmhoIZZhW6dtcZ0BHFfDAYBIFxhzbKfM1VUJWbI2AFYcaZTKZ1goZvMkFTr3+ogEcRzsBe
+N9vOwgMNYTp9ACo5XRZlvsLXdm6fQJnAWNgj2BMXpGUkO8geJ75C8rkqvTBN0XY77CxQDwUXP5++
+P/ty+kkci8tGpY3b+uwKxjzNYmBrsgjAVK1hG10GLVHxJaj7xHsw78QUYM+oN4mvjKsaeBdQ/1zW
+9BqmMfNeBqcfTt6cn05++XT68+TT2edTQBDsjAz2aMpoHmtwGFUEwgFcOVeRtq9Bpwc9eHPyyT4I
+JomafPcNsBs8GV7LCpi4HMKMxyJcxXcKGDQcU9MR4thpABY8HI3Ea3H49OnLQ4JWbIoNAAOz6zTF
+hxNt0SdJtsjDETX+jV36Y1ZS2n+7PPrmShwfi/C3+DYOA/ChmqbMEj+ROH3eFBK6VvBnmKtREMzl
+AkTvRqKADp+SXzziDrAk0DLXdvq3PMnMe+ZKdwjSH0PqAThMJrM0VgobTyYhEIE69HygQ8TONUrd
+EDoWG7frSKOCn1LCwmbYZYz/9KAYT6kfosEoul1MIxDX1SxWklvR9KHfZII6azIZ6gFBmEliwOFi
+NRQK0wR1VpmAX0uchzpsqvIUfyJ81AIkgLi1Qi2Ji6S3TtFtnNZSDZ1JARGHwxYZUdEmivgRXJQh
+WOJm6UajNjUNz0AzIF+agxYtW5TDzx74O6CuzCYON3q892KaIab/wTsNwgFczhDVvVItKKwdxcXp
+hXj5/HAf3RnYc84tdbzmaKGTrJb24QJWy8gDI8y9jLy4dFmgnsWnR7thriK7Ml1WWOglLuUqv5Vz
+wBYZ2Fll8TO9gZ05zGMWwyqCXid/gFWo8Rtj3Ify7EFa0HcA6q0Iill/s/R7HAyQmQJFxBtrIrXe
+9bMpLMr8NkFnY7rRL8FWgrJEi2kcm8BZOI/J0CSChgAvOENKrWUI6rCs2WElvBEk2ot5o1gjAneO
+mvqKvt5k+Tqb8E74GJXucGRZFwVLMy82aJZgT7wHKwRI5rCxa4jGUMDlFyhb+4A8TB+mC5SlvQUA
+AkOvaLvmwDJbPZoi7xpxWIQxeiVIeEuJ/sKtGYK2WoYYDiR6G9kHRksgJJicVXBWNWgmQ1kzzWBg
+hyQ+151HvAX1AbSoGIHZHGpo3MjQ7/IIlLM4d5WS0w8t8pcvX5ht1JLiK4jYFCeNLsSCjGVUbMCw
+JqATjEfG0RpigzU4twCmVpo1xf4nkRfsjcF6XmjZBj8AdndVVRwdHKzX60hHF/Ly+kAtDr7983ff
+/fk568T5nPgHpuNIiw61RQf0Dj3a6HtjgV6blWvxY5L53EiwhpK8MnJFEb8f6mSei6P9kdWfyMWN
+mcZ/jSsDCmRiBmUqA20HDUZP1P6T6KUaiCdknW3b4Yj9Em1SrRXzrS70qHLwBMBvmeU1muqGE5R4
+BtYNduhzOa2vQzu4ZyPND5gqyunQ8sD+iyvEwOcMw1fGFE9QSxBboMV3SP8zs01M3pHWEEheNFGd
+3fOmX4sZ4s4fLu/W13SExswwUcgdKBF+kwcLoG3clRz8aNcW7Z7j2pqPZwiMpQ8M82rHcoiCQ7jg
+WoxdqXO4Gj1ekKY1q2ZQMK5qBAUNTuKUqa3BkY0MESR6N2azzwurWwCdWpFDEx8wqwAt3HE61q7N
+Co4nhDxwLF7QEwku8lHn3XNe2jpNKaDT4lGPKgzYW2i00znw5dAAGItB+cuAW5ptysfWovAa9ADL
+OQaEDLboMBO+cX3Awd6gh506Vn9bb6ZxHwhcpCHHoh4EnVA+5hFKBdJUDP2e21jcErc72E6LQ0xl
+lolEWm0Rrrby6BWqnYZpkWSoe51FimZpDl6x1YrESM1731mgfRA+7jNmWgI1GRpyOI2OydvzBDDU
+7TB8dl1joMGNwyBGq0SRdUMyLeEfcCsovkHBKKAlQbNgHipl/sT+AJmz89VftrCHJTQyhNt0mxvS
+sRgajnm/J5CMOhoDUpABCbvCSK4jq4MUOMxZIE+44bXcKt0EI1IgZ44FITUDuNNLb4ODTyI8ASEJ
+Rch3lZKFeCYGsHxtUX2Y7v5DudQEIYZOA3IVdPTi2I1sOFGN41aUw2doP75BZyVFDhw8BZfHDfS7
+bG6Y1gZdwFn3FbdFCjQyxWEGIxfVK0MYN5j8p2OnRUMsM4hhKG8g70jHjDQK7HJr0LDgBoy35u2x
+9GM3YoF9h2GuDuXqDvZ/YZmoWa5Cipm0YxfuR3NFlzYW2/NkOoA/3gIMRlceJJnq+AVGWf6JQUIP
+etgH3ZsshkXmcblOspAUmKbfsb80HTwsKT0jd/CJtlMHMFGMeB68L0FA6OjzAMQJNQHsymWotNvf
+BbtzigMLl7sPPLf58ujlVZe4420RHvvpX6rTu6qMFa5WyovGQoGr1TXgqHRhcnG20YeX+nAbtwll
+rmAXKT5++iKQEBzXXcebx029YXjE5t45eR+DOui1e8nVmh2xCyCCWhEZ5SB8PEc+HNnHTm7HxB4B
+5FEMs2NRDCTNJ/8MnF0LBWPszzcZxtHaKgM/8Pq7byY9kVEXye++GdwzSosYfWI/bHmCdmROKtg1
+21LGKbkaTh8KKmYN69g2xYj1OW3/NI9d9ficGi0b++5vgR8DBUPqEnyE5+OGbN2p4sd3p7bC03Zq
+B7DObtV89mgRYG+fT3+DHbLSQbXbOEnpXAEmv7+PytVs7jle0a89PEg7FYxDgr79l7p8DtwQcjRh
+1J2OdsZOTMC5ZxdsPkWsuqjs6RyC5gjMywtwjz+7ULUFM4z7nI8XDntUkzfjPmfia9Qqfv4QDWSB
+eTQY9JF9Kzv+f8zy+b9mkg+cijm5/gOt4SMB/VEzYePB0LTx8GH1L7trdw2wB5inLW7nDrewOzSf
+VS6Mc8cqSYmnqLueijWlK1BsFU+KAMqc/b4eOLiM+tD7bV2WfHRNKrCQ5T4ex44FZmoZz6/XxOyJ
+gw+yQkxssxnFqp28nrxPjYQ6+mxnEjb7hn45W+YmZiWz26SEvqBwh+GPH386DftNCMZxodPDrcjD
+/QaE+wimDTVxwsf0YQo9pss/L1XtrYtPUJMRYCLCmmy99sEPBJs4Qv8a3BMR8g5s+Zgdd+izpZzd
+TCSlDiCbYlcnKP4WXyMmNqPAz/9S8YKS2GAms7RGWrHjjdmHizqb0flIJcG/0qnCmDpECQEc/luk
+8bUYUuc5hp40N1J06jYutfdZlDkmp4o6mR9cJ3Mhf6/jFLf1crEAXPDwSr+KeHiKQIl3nNPASYtK
+zuoyqTZAgljl+uyP0h+chtMNT3ToIcnHPExATIg4Ep9w2vieCTc35DLBAf/EAyeJ+27s4CQrRPQc
+3mf5BEedUI7vmJHqnsvT46A9Qg4ABgAU5j8Y6cid/0bSK/eAkdbcJSpqSY+UbqQhJ2cMoQxHGOng
+3/TTZ0SXt7Zgeb0dy+vdWF63sbzuxfLax/J6N5auSODC2qCVkYS+wFX7WKM338aNOfEwp/Fsye0w
+9xNzPAGiKMwG28gUp0B7kS0+3yMgpLadA2d62OTPJJxUWuYcAtcgkfvxEEtv5k3yutOZsnF0Z56K
+cWe35RD5fQ+iiFLFptSd5W0eV3HkycV1mk9BbC264wbAWLTTiThWmt1OphzdbVmqwcV/ff7x4wds
+jqAGJr2BuuEiomHBqQyfxuW16kpTs/krgB2ppZ+IQ900wL0HRtZ4lD3+5x1leCDjiDVlKOSiAA+A
+srpsMzf3KQxbz3WSlH7OTM6HTcdikFWDZlJbiHRycfHu5PPJgEJ+g/8duAJjaOtLh4uPaWEbdP03
+t7mlOPYBodaxrcb4uXPyaN1wxP021oDt+PCtB4cPMdi9YQJ/lv9SSsGSAKEiHfx9DKEevAf6qm1C
+hz6GETvJf+7JGjsr9p0je46L4oh+37FDewD/sBP3GBMggHahhmZn0GymWkrfmtcdFHWAPtDX++ot
+WHvr1d7J+BS1k+hxAB3K2mbb3T/vnIaNnpLVm9Mfzj6cn725OPn8o+MCoiv38dPBoTj96Yug/BA0
+YOwTxZgaUWEmEhgWt9BJzHP4r8bIz7yuOEgMvd6dn+uTmhWWumDuM9qcCJ5zGpOFxkEzjkLbhzr/
+CDFK9QbJqSmidB2qOcL90orrWVSu86OpVGmKzmqtt166VszUlNG5dgTSB41dUjAITjGDV5TFXpld
+YckngLrOqgcpbaNtYkhKQcFOuoBz/mVOV7xAKXWGJ01nregvQxfX8CpSRZrATu5VaGVJd8P0mIZx
+9EN7wM149WlApzuMrBvyrLdigVbrVchz0/1HDaP9XgOGDYO9g3lnktJDKAMbk9tEiI34JCeUd/DV
+Lr1eAwULhgd9FS6iYboEZh/D5losE9hAAE8uwfriPgEgtFbCPxA4cqIDMsfsjPDtar7/l1ATxG/9
+6689zasy3f+bKGAXJDiVKOwhptv4HWx8IhmJ04/vRyEjR6m54i81lgeAQ0IBUEfaKX+JT9AnQyXT
+hc4v8fUBvtB+Ar1udS9lUeru/a5xiBLwRA3Ja3iiDP1CTPeysMc4lVELNFY+WMywgtBNQzCfPfFp
+KdNU57ufvTs/Bd8RizFQgvjc7RSG43gJHqHr5DuucGyBwgN2eF0iG5fowlKSxTzymvUGrVHkqLeX
+l2HXiQLD3V6dKHAZJ8pFe4jTZlimnCBCVoa1MMvKrN1qgxR22xDFUWaYJSYXJSWw+jwBvExPY94S
+wV4JSz1MBJ5PkZOsMhmLaTIDPQoqFxTqGIQEiYv1jMR5ecYx8LxUpgwKHhabMrleVni6AZ0jKsHA
+5j+dfDk/+0BlCYcvG6+7hznHtBMYcxLJMaYIYrQDvrhpf8hVk0kfz+pXCAO1D/xpv+LslGMeoNOP
+A4v4p/2K69COnZ0gzwAUVF20xQM3AE63PrlpZIFxtftg/LgpgA1mPhiKRWLZi070cOfX5UTbsmVK
+KO5jXj7iAGdR2JQ03dlNSWt/9BwXBZ5zzYf9jeBtn2yZzxS63nTebEt+cz8dKcSSWMCo29ofw2SH
+dZrq6TjMto1baFurbeyvmRMrddrNMhRlIOLQ7TxymaxfCevmzIFeGnUHmPheo2sksVeVD37NBtrD
+8DCxxO7sU0xHKmMhI4CRDKlrf2rwodAigAKh7N+hI7nj0dNDb46ONbh/jlp3gW38ERShzsWlGo+8
+BE6EL7+z48ivCC3Uo0cidDyVTGa5zRPDz3qJXuULf469MkBBTBS7Ms6u5ZBhjQ3MZz6xt4RgSdt6
+pL5MrvoMizgD5/RuC4d35aL/4MSg1mKETrsbuWmrI5882KC3FGQnwXzwZbwG3V/U1ZBXcss5dG8t
+3Xao90PE7ENoqk/fhyGGY34Pt6xPA7iXGhoWeni/bzmF5bUxjqy1j62qptC+0B7srIStWaXoWMYp
+TjS+qPUCGoN73Jj8gX2qE4Xs7546MScmZIHy4C5Ib24D3aAVThhwuRJXjiaUDt9U0+h3c3krUzAa
+YGSHWO3wm612GEU2nNKbB/bV2F1sLjb9uNGbBrMjU46BnpkqYP2iTFYHiE5vxGcXZg0yuNS/6i1J
+nN2Ql/z2r2dj8fbDz/DvG/kRTCkWP47F3wAN8TYvYX/J1bt0rQJWclS8ccxrhRWSBI2OKvgGCnTb
+Ljw647GILjHxa0usphSYVVuu+NoTQJEnSBXtjZ9gCifgt6nsanmjxlPsW5SBfok02F7sggUiB7pl
+tKxWKdoLJ0rSrObl4Pzs7emHT6dRdYccbn4OnCiKn5CF09FnxCWeh42FfTKr8cmV4zj/KNOix2/W
+m05TOIObThHCvqSwG02+UiO2m4u4xMiBKDbzfBZhS2B5rtWr1uBIj5z95b2G3rOyCGs40qdojTeP
+j4Ea4te2IhpAQ+qj50Q9CaF4ikVj/Dga9JvisaDQNvx5erOeu5FxXf1DE2xj2sx66He3unDJdNbw
+LCcRXsd2GUxBaJrEajWduYWCHzOhb0QBLUfnHHIR12klZAaSS5t8upoCNL1b28cSwqzC5owK3ihM
+k67jjXKSkGIlBjjqgKrr8UCGIoawB/8pvmF7gEWHouZaaIBOiNL+KXe6qnq2ZAnmLRFRryfxYJ1k
+L918Hk1hHpR3yLPGkYV5otvIGF3LSs+fHwxHly+aTAeKSs+8yt5ZAVbPZZM9UJ3F06dPB+Lf7/d+
+GJUozfMbcMsAdq/Xck6vt1huPTm7Wl3P3ryJgB9nS3kJD64oem6f1xmFJnd0pQWR9q+BEeLahJYZ
+TfuWXeagXckHzdyCD6y05fglS+jeIwwtSVS2+vooDDsZaSKWBMUQxmqWJCGHKWA9NnmNRXkYZtT8
+Iu+A4xMEM8a3eELGW+0lepiUQGu5x6JzLAYEeEC5ZTwaVTVTWRrgObnYaDQnZ1lSNfUkz93DU30X
+QGWvM9J8JeI1SoaZR4sYTn2nx6qNh53vZFFvx5LPLt2AY2uW/Po+3IG1QdLyxcJgCg/NIs1yWc6M
+OcUVS2ZJ5YAx7RAOd6ZbnMj6REEPSgNQ72QV5lai7ds/2XVxMf1I58j7ZiSdPlTZm7E4OBRnrQTD
+KGrGpzCUJaTlW/NlBKN8oLC29gS8scSfdFAViwm8CzzcusY60xdzcP5Gc1sHwKHLoKyCtOzo6Qjn
+BjILn5l2y3Ua+KEtOuF2m5RVHacTff/DBB22iT1Y13jaeridlZ7WWwEnPwcPeF+n7oPjYLJskJ6Y
+emtKM47FQocoIrfEzK/GKnL08g7ZVwKfAikzn5jCaBNEurTsaitOdc6mo+IR1DNTxbTFMzflM53K
+ExfzMeU5mbqHLV60waV9kYV4fSyGL8bi29ZGaFZs8GInQPnJPHoyD32fjLpeHh02dqa78WxB2Ark
+5dWjp5smU5pe2Jdzfn9fnXSIG8AVyM4ikfP9JwqxY5y/FqqG0sxrO6fQjLEkfc9mPelq7KZGhUrR
+puDVrxuF4qgW43/aQUyZt9YDXBGLQssWyFbxm8STVvKfvbcNEwM1ev7Koucy6Tucwm94Wwq81wR1
+HZ2th5Y6rd6C7dmT69pJPoJqGjYcf69H9ShRaueId1rh8WQjcS7rP4KHQ7pZhpjmWetY+F/JPJy0
+v+1wsYPld9/swtNVML1lEj0Lurt2gZe6XbDQLLf59Ie6PEbp6/pVAuNAaUQHvD5z+SP5a0eYD8y3
+uuQ2L3iF1yvSWS/allS6/gfvSfkeLXQIaBNO6VmwFuCS1As8mr2l2yJPFKWR4aUv3xy+GJtaWwak
+J/AyevlMX6pI3cx1Ar6zOtabIHip+x1G/+YASyq/t33V2RbQtI5btyv5g4UUjxpFE0uHxnLcX1nR
+rFks8BbChpjspNorNd6D2zAFh8FcJ5qD5wM7u6gPXVdjNNK7TbVtEeCtwUP72SY5D+raKFJEepew
+bVOeuxTno0VB9+q3ILgXR85fxvwGfaq6OLKxKmNT8Cxx6OZH4qe66a3kYnuCxrW6CXdNn/vvmrtu
+EdiZm/SAztz9ik2XBrrvdivaRwOOE2hCPKjooNH4/cbEtQNjnZXSH/PWHyS/2wlnusWs3AfG5MBg
+BJ3YU2NvzP4qnrnfMcVqn684dgt0e52N1rQ7NqPN8Q/xFDidBJ/bmn3KEZprDuSNB91ZN+Gs04m8
+vlaTGO9LnNBulTKkOtsQs/95T9fdyVhtzLYFrwECEIabdC6rm64OjAG6ku9t5gQj574XQUNTGq6T
+16uSOZsEvUcCcBGHHqm/CW1zYu4glRgxVnVZlLCtHOjbfTnzpS9ZuAFqImGrWN0Y1E2Psb7slRQr
+pVuZol4OeLbSZoAIbMQ7pmEyse+AV543FxckY8sMMqtXsoyr5tIe/4w9Ea+dEaiMGxfXiXM1Utni
+EhexxPKGgxRGmuz3Z7BD83anO24qGFlt93B2oh46dvqYSxAcY2S4OLmzF/a5F0XN6bJo1zu0zRqu
+s5cUwTKY2+dIR+qgE7/VN2Lxra0cEkf/0uEfkHe3ltHP67bqjL1bi4bzzFUI3SuQsAafjHPfzYYd
+DujeYdjaodrxfX1hGaXjYW5pbKmoffJehdOMNmpCMZiCeU8oxk+zf2QoxoP/wFCMvocSDI3GR+uB
+3sT7e2I2rB7cSx0bRoA+EyASHgm3rgQ0pnLoprEXuUruBvaKZtaVTm2cMQ/Ikd3bvggEX96o3Jxf
+73K1XaEYX7ro8Q/nH9+cnBMtJhcnb//z5AdKc8Jzh5atenCsKsv3mdr7XkK1G7fSqSl9gzfY9ty5
+ylVBGkLnfedUvwdCfwVY34K2FZn7eluHTiVNtxMgvnvaLajbVHYv5I5fpqs23ISUVuZzoJ9ymqr5
+5Zz1m0fmyIvFoTnSMu+bUwgto50g7baFcxJGu+pE+6v6Xs0tAeSRTVumFcDDB+Qve/ZgalBshJsd
+lPb/OINyrbF+z9xJA1I4k87diHQtIoOq/P9DRwnKLsa9HTuKY3vbNbXjcxZlr3HHQ9SZjAxBvAK6
+QXd+rrDPZbqFCkHACk/f/MeIGP2nTybtOf4TJS73qVR3H5XNlf2Fa6ad278meFpf2Ru0FKf88Hkl
+NF7UqXsCb/t0OpDTR8c6+cKpDQHNdwB0bsRTAXujv8QKcboRIWwctUuG6aZER339nYM82k0He0Or
+52J/WyGnW8goxIvtDeetWknd45B7qHt6qNqUyzkWGPMet1VoitcEmc8FBV2Z5TkfeBitt/3w9fby
+xZGN0iO/42tHkVB+1sAx7JdOfuPOaxqd7sQs5ZgS4HCv5tT36hZXDlT2CbbtbTpFHlv2PyZhgCEN
+vPf9ITPTw7vMftDG1LLeEUxJDJ+oEU3LKYvRuNsno+50G7XVBcIlPg8A0lGBAAvBdHSjk3K54bzp
+4XO9G5zWdMGte1QTOlJB6Vc+R3AP4/s1+LW7U2nug7oziqY/N2hzoF5yEG72HbjVyAuFbDcJ7ak3
+fLDFBeAq5/7+Lx7Qv5sYaLsf7vKrbauXvZV17MtiLimm2LRIZB5HYGRAbw5JW2MBghF0vNiloaPL
+UM3ckC/Q8aP8VLy+mjYY5MxOtAdgjULwf2RtvCc=
 """)
 
 ##file ez_setup.py
@@ -2121,93 +2138,93 @@
 
 ##file distribute_setup.py
 DISTRIBUTE_SETUP_PY = convert("""
-eJztPF1z2ziS7/oVOLlcpHISE2fm5q5cp6nKTDyzrs0mqTjZfUhcMkRCEsf8GpC0ov31190ACICk
-ZOdm9uGqzrtjS0Sj0ejvboA5+7fq0OzKYjKdTn8qy6ZuJK9YksLfdN02gqVF3fAs400KQJPrDTuU
-LdvzomFNydpasFo0bdWUZVYDLI5KVvH4nm9FUKvBqDrM2W9t3QBAnLWJYM0urSebNEP08AWQ8FzA
-qlLETSkPbJ82O5Y2c8aLhPEkoQm4IMI2ZcXKjVrJ4L+8nEwY/GxkmTvUr2icpXlVygapXVlqCd5/
-FM4GO5Ti9xbIYpzVlYjTTRqzByFrYAbSYKfO8TNAJeW+yEqeTPJUylLOWSmJS7xgPGuELDjw1ADZ
-Hc9p0RigkpLVJVsfWN1WVXZIi+0EN82rSpaVTHF6WaEwiB93d/0d3N1Fk8lHZBfxN6aFEaNgsoXP
-NW4llmlF29PSJSqrreSJK88IlWKimVfW5lO9a5s0674duoEmzYX5vCly3sS7bkjkFdLTfefS/Qo7
-qrisxWTSCRDXqI3ksnI7mTTycGmFXKeonGr4083Vh9XN9cerifgaC9jZNT2/QgmoKR0EW7K3ZSEc
-bGYf7Ro4HIu6VpqUiA1bKdtYxXkSPuNyW8/UFPzBr4AshP1H4quI24avMzGfsX+noQ5OAjtl4aCP
-YmB4SNjYcsleTI4SfQZ2ALIByYGQE7YBISmC2Mvouz+VyDP2e1s2oGv4uM1F0QDrN7B8AapqweAR
-YqrAGwAxOZIfAMx3LwO7pCELEQrc5swf03gC+B/YPowPhx22BdPzehqwcwQcwGmY/pDe9GdLAbEO
-PugV69u+dMo6qisORhnCp/erf7y6/jhnPaaxZ67MXl/98urTm4+rv199uLl+9xbWm76Ifoi+u5h2
-Q58+vMHHu6apLp8/rw5VGilRRaXcPtc+sn5egx+LxfPkuXVbz6eTm6uPn95/fPfuzc3ql1d/vXrd
-Wyi+gIVcoPd//XV1/faXdzg+nX6Z/E00POENX/xdeatLdhG9mLwFN3vpWPikGz2vJzdtnnOwCvYV
-fiZ/KXOxqIBC+j551QLl0v28EDlPM/XkTRqLotagr4XyL4QXHwBBIMFjO5pMJqTG2hWF4BrW8Hdu
-fNMK2b4MZzNjFOIrxKiYtJXCgYKnwSavwKUCD4y/ifL7BD+DZ8dx8CPRnssiDK4sElCK8zqY68kK
-sMyS1T4BRKAPW9HE+0Rj6NwGQYEx72BO6E4lKE5EKCcXlZUozLYszErvQ+/ZmxzFWVkLDEfWQrel
-JhY33QWODgAcjNo6EFXxZhf9BvCasDk+zEC9HFo/v7idDTeisNgBy7C35Z7tS3nvcsxAO1RqoWHY
-GuK47gbZ607Zg5nrX4qy8TxaYCI8LBdo5PDxmascPQ9j17sBHYbMAZbbg0tje1nCx6SVRnXc3CZy
-6OhhEYKgBXpmloMLB6tgfF0+iP4kVM60iUsIo8Z1v/QAtL9RDzdpAauP6ZNSP4tbhdxI5o0UotM2
-bTjrNgVwsd2G8N+cdfbTlCsE+3+z+T9gNiRDir8FAymOIPqpg3BsB2GtIJS8LaeOmdHid/y9xniD
-akOPFvgNfkkH0Z+ipGp/Su+N7klRt1njqxYQooC1EzDyAIOqm5qGLQ2Sp5BTX7+jZCkMfi7bLKFZ
-xEdlrdstWqe2kQS2pJPuUOfv8y4NX615Lcy2nceJyPhBr4qM7iuJhg9s4F6c14vqcJ5E8H/k7Ghq
-Az/nzFKBaYb+AjFwU4KGjTy8uJ09nT3aaIDgbi9OiXBk/8do7f0c4ZLVukfcEQFSFonkgwcWsglf
-zJmVv87H/ULNqUrWpkw1KcOKCoIlGY6Sd68o0jte9pK2HgeWTuI2yg21gyUaQCtHmLC8+I85CGe1
-4fdi+VG2ovO9OScHULdQSe4pnScd5eu6zNCMkRcTu4SjaQCCf0OXe3terxSXBPraoLrfrsCkKI+s
-Ka1G/uZl0maixtLuS7ebwHKlDzj0094XRzTeej6AUs4dr3nTyNADBENZJU7UHy0LcLbm4HhdQEN+
-yd4H0c7BVlMdxLFCq5upovMf8RbHmecxI9J9hXBqWfLjcgp1mV5vNkJYfx8+Rp3K/1wWmyyNG39x
-AXqi6pmY/Ek4A4/SF52rV0Pu43QIhZAFRXsJxXc4gJh+JN9OG0vcNonTTgp/XJ5DEZXWJGr+ACUE
-VVdfiukQH3Z/Yl4EDSZS2tgB836HnQ1qCelOBnySbYHxJWLvMwECGsVnuh2c5aVEUmNMCw2hm1TW
-zRyME9CMTg8A8cE4Hbb45OwriEbgvxRfivDnVkpYJTsoxOxczgC5FwFEhFksZhZDZVZCS5vwpT8m
-snrEQkAHWc/oHAv/3PMUtzgFYzP1osr7YwX2t9jDk6LIMZsZ1esu24FV35bNL2VbJH/YbB8lc4zE
-QSp0ymGtYil4I/r+aoWbIwvssiyKWCcC9R8NW/QzErt0yNKOGIr017Yt2dkrhdau+QnGl5Ux1UvU
-mtWcTxvVbSx4LlTWeKdpv4OskJKzNbZQH3iWetiN6RVtvhYSTJqTLXdugXBhy5KyYmrjdL1TUAOa
-Itidx487ho2XEJxEvDOriyJRkRP7ypwFz4NZxO4UT+5wRa84AAcjpDBZZFfJmVVEEqk9Ege76XoP
-1BWOyyKh/mzFMdavxQb9DbZi46blme0S0/4aLLWayIjhX5IzeOGIhNpKqMTXFIgEtuZ1j1xmWHdN
-HHMcDZcOipdjc5vtP1eoDtiP8vLjCOu07T/RA2rpq0a89NJVFCQEQ4NFpYD8QQBLj2ThBlQnmDJG
-dLAv3e91zLWXOiu0s0vk+auHMkWtrtB0k44cm+QMonpXv3TWQ06+ns5xS77PVkRpLoWD4TP2QfDk
-OQVXhhEG8jMgna3B5O7neCqwRyXEcKh8C2hyXEoJ7oKsr4cMdktabewlxfOZRhC8UWHzg51CzBBk
-DPrAk15SpdhIRCtmzdl0v54OgHRegMjs2MBpaknAWiM5BhBgavgePOAfiXewqAtv27kkYdhLRpag
-ZWyqQXDYNbivdfk13LRFjO5Me0Eadsep6Ttnz57d72cnMmN1JGFrFD3dWMZr41pu1PNTSXMfFvNm
-KLXHEmak9iEtVQNr0Px3fype14OB/koRrgOSHj7vFnkCjg4WMB2fV+HpEJUvWCg9IbWxE37hAPDk
-nL4/77gMtfIYjfBE/6g662WGdJ9m0KgIRtO6cUhX6129NZpOZK3QO4RoCHNwGOADisYG/X9QdOPx
-fVuRv9io3FoUaksQ201IIn8J3m2lcRifgIhnrt8Adgxhl2Zpy6Iz8HI47WC4N9L2euVDuA1XvW2r
-DnbWe4TGaiAyEyChxOiwIndAFKuUzt0EWNo+GAuX2rEZ3o0ng5sxT0TKPXHEAOu57sUZ6bwTnoUb
-vo1KzXi5PvMdJhtcg10rDIXYm+iMTyHSBtG7N6+j8xrP2vAcN8Jfg/bvB0SnAhxmN9R2VBQajLoP
-jAUufg3HRjX95qGlNS8fIGEG41i5nfmwyngsdqDuwnSze5E8rbEfOQTzif9U3EMs9Jr+kHvpTThz
-jyvYBmsPzwNhRmruMTjN4nFSgGp9LB7pvyHOnbtdmWfYN1xggdB3+Gbxgb9cg/TvXbZs/BLJcsD2
-SSmLd8/63XV7DJj0lOBv5QOqgMiEOigu2wazXnQee36wJmcqnX7G5jBnzpTma+J78tTzHT5YZ64N
-B4heebDKU3kRZDBJuUM9Y85GTlF171vzc+DbLS/ADnjfQ82ZT82oKp0B5j3LRBPUDNW+8719fnZq
-pvmNmha6bbx5rwGom/x4PwI/OtwzGE7JQ8N4Z3L9XrMG6dW7rqsZYBnG9DGtBJ+qmvfAVkOs5sSR
-VnpwY28fJU6jIOjtxHfHxzxN3zkfg+tcNd9AQt2dXCMBmitOAEOQ7p5N17vujMQyHwsWwIAHZ+D+
-8xyoWJXr38Lu2HMWmYZ3BUUhVF4qsj3WaPB8myb8W+Z4LtelF5RypJ56zA2PiNtwx/QWhi6IWHV4
-ICaB0elAFT757EQVhXajOhQ7dqSPbmrrB2GBL57WhceuMMwVbd/g9nqkDDyg4eXQBY76HgV+wvP0
-ffjPKH8VyAez/NynS5A6f9klSTr1vioeUlkWaGy9/NstjrFs3UEZxioh87SuzQ02Ve6eY6fyPq0q
-oGl6YhtD+nRuNurECeB4nqbE1XSJ2XFxOXoSwYSgnxf12NnsHKlaDurHj6WZHhlOw66vM4/v7zEz
-7/m7J7mTycyvLboIbLPLMx3XIBzG96jVKX4by/WP2orKxq9+/XWBksR4BlJVn7/BVtJBNn0y6B8L
-UE8N8lZPnUB/pPAA4vP7jm/+o5OsmD3iZR7l3CmL/tNMy2GFVwJpbRmvgvSgvdhCbdMuvA5C60+q
-rXo0to6cFWrM1DteVVJs0q+hiTo20HURl8KUPiblcvtw2fNHNhnXlw4N4GfzAUJ2Ir46MRxqrYvL
-2y6ro+G5uZwoijYXkqtri24vB0HVtV+V/y0WEnarbm6obfTLBdgG4IhgVdnU2PdGPV5iUFN4RhpF
-TVlp4dDMKkubMMB1lsHs86J3XugwwTDQXUzj6h9aKaqwUFVUjB4CZ6Cc6q7lj4o/4z0tj9z6M0Ei
-d4d0fiutlkpgb1sLGdBph71ErI8vsbM82kMaW6WbPWIdSisH6tpX+JuY0yGncxZqrpGOGfDR4/pT
-PbMzthcBWFUMJIwkHU6+DSrp3ERKSqGYUguRY2B3j2yHbRv6ukeT8YsXfVcK2TDckBOOMFOGyfs6
-wizSP4v2MX5QB9KYnkR0ybxXPUlBoR7Hl+S2fZ31Up2Ph0oM+IVNU+dM69X7638lwZY6W6T2lwH1
-9FXTvY/mvrDhlkyqbTAuqDOWiEboe38Yz/GuQBcUUW+TfobdnRMu++RFZqiv3e6LJE5RppYGXTfN
-mpFVNC/o1EP5RlRP8o3pVyK2kuVDmohEvVOSbjS8+/ZK7bRGEn1lMJ/bUxfTEHXrIT+UjFE2LgWN
-DRg67xMMiNRhzdhl2aFvU/fogZYdVEfHKygvMwMbVXKs3QuHeksjm4hEkeggQvfajmyqWKj7iFZ4
-Hh1o7ce7fKNSNZM1aYBjzN+ONH2cK6vHSTqWRI2Qcjqn0iSGx1JS1Dm/W/INaenRvPREb7zHG3/e
-sDvu6kZ3tohmTQfgykPSYbTj/QvRF61fEPxReQ7phZiUV0CkcJr6GW+LeGczO/ukHzw/6BFv4xjt
-VFlK73opCOpJmJeBFFSVVizn8h5vHJSM0zExtxPW7VYXT3lyge+eBIvYv7AOiQRe/8nEQrcmFuIr
-vQ4GCfQi5wXE8CS47ZC8PIZEiriUBlK/j0MJ5+V3t5iwKArAlYwNvHRCqRl+cdv1QbBd6Cazn/03
-YG4huTLTJgYH3U0afbmpE4lzYbsW2UadGCynEdT5ucA7E/USo5U9ktKXzOkMXEOoA1a6/yBBhEpe
-+DVW16vMHWuzP3uXA709vppX7gus5PMywZf4VGTBMw4CcHsS9rDSIElBvanTB4qU1BG7ww0E3Z0Y
-fKMOkG4EETK4Yg6Eag7AR5isdxSgj1dJMM+IiBzfkKR7MsBPIplanwYPni1o+4DotD6wrWg0rnDm
-Xx7RiV9cVgf3O1R9UFvo+5CKoeqqvQHQjLeXJl0OgD7cdhmHEcsg0zADGPWzzaSrc2Al8rQQqzSI
-V6brYd3573m8M0OYR4++y1PzjUCpit6NBgsZ8QrK3STUa/hO0tC1JG5F+OskIN6lw17R99//l0qL
-4jQH+VF9BgS++M8XL5zsL9tEWvYGqdL+Ll35INAdCFYj+12aXft2m5nsv1n4cs6+d1iERobzhQwB
-w8Uc8bycjdYlcV4RTIQtCQUY2XO5Pt8QaagwjwNIRX04duoyQHQvDkujgRHedAD9RZoDJCCYYSJO
-2NTNacMgSArpkgvg6ky4M1vUXZIHZol95vW0zhn3iKTzz9EmipG4z6DBtQGScrwD4qyMNd7ZELCl
-c9UnAMY72NkJQNN8dUz2f3HlV6koTs6A+xkU3BfDYpsuVPcK+bErGoRslay3ISjhVPsWfLUQL3uJ
-3vtK7gtcoX6j2YYA+vtT9zKHfSsVvGmgX4I1MYt13ZrSvOXTFWO6PPa9o7Oy8mqaGZqKCCt+Q5/n
-pY4Y4w/HMrSp6h6YO9E1e29e3/0BQzTko0L2rlGpy+s3h7oR+RXG1gsnaXIIN07NNCi8poIL2DVr
-wbQUs3tcfo8jKpaqQyeINIVwOk61B06I6Lahfmc7ekdQhEZqV6CAIp4kK4XD1ruGYLyAWjfLwGU2
-POR092YZ1A22/hpwBQS54W2my3N7x3Unsmpp0iO0cWI2vRiu5c7CU6yfBU+h1lygW+CdxI5s76Zi
-gJlMwx+4XE4/fXgztSQaykfv6Cr6zT8LgEkN3lylwKxvoJb2+t64YusdaEHNTeamd+QK3SSyJfBH
-5xydUXHsom4L4HjiqpERP2lQzsExHrmRbDXq+tS/J0A++4rXBw1lVMr8ewZLX01V/+fkq0z+RWhj
-v95TzzCGLxmf8kbgsVK6Doi12oragasV8mG10i+8dxkwcQcm/A9nRa43
+eJztPGtz2ziS3/UrcHK5SOUkxs7MzV25TlOVmTizrs0mKdvZ/ZC4aIiEJI75GpC0ov311403SEp2
+LrMfruq8O7ZENBqNfncDzMm/1ft2W5WT6XT6S1W1TctpTdIM/marrmUkK5uW5jltMwCaXK3JvurI
+jpYtaSvSNYw0rO3qtqryBmBxlJOaJg90w4JGDkb1fk5+75oWAJK8Sxlpt1kzWWc5oocvgIQWDFbl
+LGkrvie7rN2SrJ0TWqaEpqmYgAsibFvVpFrLlTT+i4vJhMDPmleFQ30sxklW1BVvkdrYUivg/Ufh
+bLBDzv7ogCxCSVOzJFtnCXlkvAFmIA126hw/A1Ra7cq8oumkyDiv+JxUXHCJloTmLeMlBZ5qILvj
+uVg0Aai0Ik1FVnvSdHWd77NyM8FN07rmVc0znF7VKAzBj/v7/g7u76PJ5BbZJfibiIURIyO8g88N
+biXhWS22p6QrqKw3nKauPCNUioliXtXoT822a7PcfNubgTYrmP68LgvaJlszxIoa6THfKXe/wo5q
+yhs2mRgB4hqNllxebSaTlu8vrJCbDJVTDn+6ubyOb65uLyfsa8JgZ1fi+SVKQE4xEGRJ3lclc7Dp
+fXQr4HDCmkZqUsrWJJa2ESdFGr6gfNPM5BT8wa+ALIT9R+wrS7qWrnI2n5F/F0MGjgM7eemgjxJg
+eCiwkeWSnE0OEn0CdgCyAcmBkFOyBiFJgsir6Ic/lcgT8kdXtaBr+LgrWNkC69ewfAmqasHgEWKq
+wRsAMQWSHwDMD68Cu6QmCxEy3ObMH1N4Avgf2D6MD4cdtgXT02YakFMEHMApmP6Q2vRnS4FgHXxQ
+KzZ3felUTdTUFIwyhE8f43+8vrqdkx7TyAtXZm8u377+9O42/vvl9c3Vh/ew3vQs+in64cepGfp0
+/Q4fb9u2vnj5st7XWSRFFVV881L5yOZlA34sYS/Tl9ZtvZxObi5vP328/fDh3U389vVfL9/0FkrO
+z6cTF+jjX3+Lr96//YDj0+mXyd9YS1Pa0sXfpbe6IOfR2eQ9uNkLx8InZvS0mdx0RUHBKshX+Jn8
+pSrYogYKxffJ6w4o5+7nBStolssn77KElY0CfcOkfxF48QEQBBI8tKPJZCLUWLmiEFzDCv7OtW+K
+ke3LcDbTRsG+QoxKhLaKcCDhxWBb1OBSgQfa30TFQ4qfwbPjOPiRaEd5GQaXFgkoxWkTzNVkCVjl
+abxLARHow4a1yS5VGIzbEFBgzFuYE7pTBRQVREgnF1U1K/W2LEys9qH27E2OkrxqGIYja6GbShGL
+mzaBwwCAg5FbB6Jq2m6j3wFeETbHhzmol0Pr57O72XAjEosdsAx7X+3IruIPLsc0tEOlEhqGrSGO
+KzNI3hhlD2aufymr1vNogY7wsFygkMPHF65y9DyMXe8GdBgyB1huBy6N7HgFH9OOa9Vxc5vIoaOH
+hTEBzdAzkwJcOFgFoavqkfUnoXJmbVJBGNWu+5UHoPyNfLjOSlh9TJ+k+lncMuRGvGg5Y0bblOGs
+ugzA2WYTwn9zYuynrWIE+3+z+T9gNkKGIv6WBKQ4gugXA+HYDsJaQUh5W04dMqPFH/h7hfEG1UY8
+WuA3+MUdRH+Kksr9Sb3XusdZ0+Wtr1pAiARWTkDLAwyqaRsxbGngNIOc+uqDSJbC4Neqy1MxS/BR
+Wutmg9apbCSFLamkO1T5+9yk4fGKNkxv23mcspzu1arI6L6SKPjABu7FabOo96dpBP9Hzo6mNvBz
+SiwVmGaoLxAD1xVo2MjD87vZ89mjjAYINntxSoQD+z9Ea+/nAJes1j3hjgSgyCKRfPDAjLfh2ZxY
++at83C/UnKpkpctUnTLEoiBYCsOR8u4VRWrHy17S1uPA0kncRrkhd7BEA+j4CBOW5/8xB+HEa/rA
+lre8Y8b3FlQ4gKaDSnIn0nmho3TVVDmaMfJiYpdwNA1A8G/ocm9Hm1hyiaGvDeqHTQwmJfLIRqTV
+yN+iSrucNVjafTG7CSxX+oBDP+19cUTjrecDSOXc0oa2LQ89QDCUOHWi/mhZgLMVB8frAjHkl+x9
+EOUcbDVlIA4VWmamjM7f4y0OM89jRqT6CuHUsuTn5RTqMrXebISw/j58jCqV/7Uq13mWtP7iDPRE
+1jOJ8CfhDDxKX3SuXg25j9MhFEIWFO04FN/hAGJ6K3y72FjqtkmcdlL48/IUiqisEaKmj1BCiOrq
+Szkd4sPuT0LLoMVEShk7YN5tsbMhWkKqkwGfeFdifInIx5yBgEbx6W4HJUXFkdQE00JN6DrjTTsH
+4wQ0o9MDQLzXTocsPjn7CqIR+C/llzL8teMcVsn3EjE55TNA7kUAFmEWi5nFUJml0LI2fOWPsbwZ
+sRDQQdIzOsfCP/c8xR1OwdgselHVw6EC+1vs4VlR5JDNjOq1yXZg1fdV+7bqyvS7zfZJMsdIHKRC
+xxxWnHBGW9b3VzFuTligybJExDoSqL83bImfkdilQpZyxFCkv7FtSWOvIrSa5icYX14lol4SrVnF
++ayV3caSFkxmjfeK9nvICkVytsIW6iPNMw+7Nr2yK1aMg0lTYcvGLQhc2LIUWbFo45jeKaiBmMLI
+vcePe4KNlxCcRLLVq7MylZET+8qUBC+DWUTuJU/ucUWvOAAHwzjTWaSp5PQqLI3kHgUHzXS1B9EV
+TqoyFf3ZmmKsX7E1+htsxSZtR3PbJRb7a7HUaiMthn9JzuCFIyHUjkMlvhKBiGFrXvXIeY5118Qx
+x9Fw6aB4NTa33fwzRnXAfpSXH0dYp23+iR5QSV824rmXrqIgIRhqLDIFpI8MWHogC9egKsHkCaKD
+fal+r2OuvdRZop1dIM9fP1YZanWNppsacmySM4jqpn4x1iOcfDOd45Z8ny2JUlwKB8Mn5JrR9KUI
+rgQjDORnQDpZgck9zPFUYIdKiOFQ+hbQ5KTiHNyFsL4eMtit0GptLxmez7RMwGsV1j/YKcQMgSeg
+DzTtJVWSjYJoyaw5me5W0wGQygsQmR0bOE0lCVhrJMcAAnQN34MH/CPxDhZ14W07V0gY9pILS1Ay
+1tUgOOwG3Neq+hquuzJBd6a8oBh2x0XTd05evHjYzY5kxvJIwtYoarq2jDfatdzI58eS5j4s5s1Q
+ao8lzEjtY1bJBtag+e/+1LRpBgP9lSJcByQ9fG4WeQYOAwuYDs+r8XRIlC9YKD0jtbET3lIAeHZO
+3593WIZKebRGeKJ/Up3VMkO6jzNoVASjad04pKv1rt5qTRdkxegdQjSEOTgM8AFla4P+P0R0o8lD
+Vwt/sZa5NSvlliC265C01k4AMc1UhAAXCg4vVmgBYu16kLVnncCm4YSlJsmy7gS8HyLZa66OtMNe
++xBuI1axw6qJnfURobFKiPQESDQxasTCTdiNeXsFC9wFY2FUOTzN0/EkcT3moYTSTxzxwHqu23FG
+jNfCM3LNt1FpfreAFHFHhKRpGXBNUlCynY76+BQieBB9ePcmOm3wDA/PhyP8NWgrXyM6GTgxaxLt
+TLlDjVH1l7Fwxq/h2KgiXz+0tBbVIyTiYHSx2/EP65wmbAtmxHSXvJchZA32OYdgPvGfygeIsd5h
+AuR0ahPO3MMKusaaxvNsmOnq+xFOE3qcFKBaHbdH6m+Ic+dut+cF9iMXWHj0A4lefOCHV6AnDy5b
+1n7pZTlg+6+iOnDvELjr9hgw6SnB36pHVAGWM3kAXXUtZtPolHZ0b01WV1D9TNBhzpxIy1HE9+Sp
+5jt8sEFCGR4QHXuw0pq8yDSYJN2smjEnI6ezqqeu+DmIGZYXYAe07+HmxKdmVJVOAPOO5KwNGoJq
+b3x6n59GzRS/UdNCtz047zUW1eEB3rvAjw73NIZj8lAw3llfv4etQHp1tOtqBliGucKYVoJPlocC
+wFZNrOLEgRZ9cGNvNaVOAyLo7cR354c8Td+5H4Izrp6uIVE3J+JIgOKKEwARxNzfMT1xYySW+VgI
+AQY8kAOPXhRARVytfg/Nceos0o30GopNqOhkZHyqgeH5NkX4t8zxXK5LLyjlSJ32lBseEbfmju5Z
+DF2QYNX+UTAJjE4FqvDZZzKy2LQbVaHcsSN1JNRYPwgLfPG0Ljx0NWIuafsGt9cjZeABNS+HLnDU
+90jwI56n78N/RfnLQD6Y5edOJlcx/tIkWSqlvywfM16VaGy9vN4turEc3kJ5R2rGi6xp9M04WUaf
+Ygf0IatroGl6ZBtD+lRuN+rEBcDhPE+KqzWJ3WFxOXoSwYSgnxf12NluHalaDqrHT6WpHhlOI7Cv
+M0/v7ykz7/m7Z7mTycyvWUwEttnliYprEA6TB9TqDL+N1QoHbUVm85e//bZASWI8A6nKz99gK9kg
+Gz8a9A8FqOcGeaunTqA/ULgA8cWD4Zv/6CgrZk94mSc5d8yi/zTTcljhlVBKW8arKDVoL8yIdqwJ
+r4PQ+ots1x6MrSNnkAqz6EnHNWfr7Guoo44NdCbiijCljl8p3zxe9PyRTcbVZUYN+Fl/gJCdsq9O
+DIda6/zizmR1YniuLz2ysisYp/I6pNsjQlB5nVjmf4sFh93KGyFyG/1yAbYBOCJYlbcN9tNRj5cY
+1CSekQZUW9VKOGJmnWdtGOA6y2D2edE7h3SYoBnoLqZw9Q/DJFVYqEoqRg+Xc1BOeYfzZ8mf8V6Z
+R27zWUAid4d0fiutlkpgb9cwHohTFHs5WR2LYsd6tDc1toqZPWIdUisH6tpX+JuEisNT54xVX08d
+M+CD1wCO9eJOyI4FYFUJkDCSdDj5Nqikc8MprZhkSsNYgYHdPQoetn3E1x2ajF+8qDtYyIbhhpxw
+hJkyTN41EWaR/hm3j/FaHnRjehKJy+u96okzEepxfCnctq+zXqpzu6/ZgF/YjHXOyl5/vPpXEmyp
+s0VqfxlQT1813Xtu7osgbskk2wbjgjohKWuZuk+I8RzvIJigiHqb9jNsc/647JMX6aG+drsvqDhF
+mVwadF03a0ZWUbwQpynSN6J6Ct+YfRXE1rx6zFKWyndVsrWCd9+KaZzWSKquIhZze5qjG61uPeSH
+kjHKxqWgsAFD532CAZE8BBq7hDv0bfJ+PtCyherocAXlZWZgo1KOjXuRUW1pZBMRK1MVRMR9uQOb
+KhfynqMVnkcHWvvhLt+oVPVkRRrgGPO3I00f5yrsYZIOJVEjpBzPqRSJ4aGUFHXO75Z8Q1p6MC89
+0lvv8cafN+yuu7phzizRrMXBuvSQ4pDb8f4l64vWLwi+V55DeiEmFTUQyZxDgZx2ZbK1mZ190g+e
+12rE2zhGO1mWinfIJIToSeiXjCRUndWkoPwBbzJUhIrjZ2onrLqNKp6K9BzfaQkWiX8RHhIJvFaU
+s4VqTSzYV/GaGSTQi4KWEMPT4M4geXUICWdJxTWkes9HJJwXP9xhwiIpAFcyNvDKCaV6+OzO9EGw
+Xegms5/9N2vuILnS0yYah7jzNPrSlBGJcxG8YflanhgspxHU+QXDuxjNEqOVPepSl9fF2bqCkAe3
+4l4FBxFKeeHXRF7b0ne39f7sHRH09vjKX7UrsZIvqhRfDpSRBc84BIDbk7CHoBpJBuotOn2gSGkT
+kXvcQGDu2uCbeoB0zQQhg6vrQKjiAHyEyWpHAfp4mQTTXBBR4JuX4v4N8FOQLFqfGg+eLSj7gOi0
+2pMNaxWucOZfSlGJX1LVe/c7VH1QW6h7lpKh8gq/BlCMt5cxXQ6APtyZjEOLZZBp6AGM+vl6Yuoc
+WEl4WohVCsQr09Ww6vz3PN6JJsyjR90RauiaoVRZ76aEhYxoDeVuGqo1fCep6VoKbkX46ygg3tHD
+XtGPP/6XTIuSrAD5ifoMCDz7z7MzJ/vL15GSvUYqtd+kK9cM3QEjDbLfpdm1b7eZSf6bhK/m5EeH
+RWhkOJ/xEDCczxHPq9loXZIUtYCJsCUhASN7LtfnGyINJeZxAC6pD8dOXQaIHth+qTUwwhsUoL9I
+c4AEBDNMxAU2eSNbMwiSQnF5BnAZEzZmi7or5IFZYp95Pa1zxj0ixfnnaBNFS9xn0OA6gpBysgXi
+rIwV3tkQsBPnqs8ATLawsyOAuvnqmOz/4iqxVFGcnAP3cyi4z4fFtrio3Svkx65+CGRxutqEoIRT
+5VvwlUW8RMZ670G5L4aF6k1pGwLE31/MSyL2bVfwpoF6uVbHLGK6NZV+e8gUY6o89r2js7L0aooZ
+iooIK35Nn+elDhjjT4cytKnsHui71g35qF8L/glDNOSjjPeuZ8lL8Tf7pmXFJcbWcydpcgjXTk03
+KLymggtomrVgWpLZPS5/xBEZS+WhE0Sakjkdp8YDF4jELUb1Lnj0QUAJNFy5AgkU0TSNJQ5b72qC
+8WJr0y4Dl9nwkIo7PcugabH114IrEJBr2uWqPLd3Z7csr5c6PUIbF8wWL5wruZPwGOtnwXOo1Rfz
+FnjX0ZDt3YAMMJNp6SPly+mn63dTS6KmfPTur6Rf/3MDmNTgjVgRmNXN1speCxxXbLUDJai5ztzU
+jlyh60S2Av6onMMYFcUu6qYEjqeuGmnxCw0qKDjGAzedrUZdHft3CoTPvqTNXkFpldL/TsLSV1PZ
+/zn6ipR/wVrbr/fUM4zhy8vHvBF4rExcM8RaLRbtwDhGPsSxepHeZMCCOzDhfwBqDMd7
 """)
 
 ##file activate.sh
@@ -2273,9 +2290,9 @@
 
 ##file deactivate.bat
 DEACTIVATE_BAT = convert("""
-eJxzSE3OyFfIT0vj4spMU0hJTcvMS01RiPf3cYkP8wwKCXX0iQ8I8vcNCFHQ4FIAguLUEgUliIit
-KhZlqkpcnCA1WKRsuTTxWBIZ4uHv5+Hv64piEVwU3TK4BNBCmHIcKvDb6xjigWIjkI9uF1AIu7dA
-akGGW7n6uXABALCXXUI=
+eJxzSE3OyFfIT0vj4ipOLVEI8wwKCXX0iXf1C7Pl4spMU0hJTcvMS01RiPf3cYmHyQYE+fsGhCho
+cCkAAUibEkTEVhWLMlUlLk6QGixStlyaeCyJDPHw9/Pw93VFsQguim4ZXAJoIUw5DhX47XUM8UCx
+EchHtwsohN1bILUgw61c/Vy4AJYPYm4=
 """)
 
 ##file activate.ps1
Binary file script/virtualenv/script/virtualenv_support/distribute-0.6.31.tar.gz has changed
Binary file script/virtualenv/script/virtualenv_support/distribute-0.6.34.tar.gz has changed
Binary file script/virtualenv/script/virtualenv_support/pip-1.2.1.tar.gz has changed
Binary file script/virtualenv/script/virtualenv_support/pip-1.3.1.tar.gz has changed
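
For context on what the large encoded hunks above actually change: each `DISTRIBUTE_SETUP_PY = convert("""...""")` / `DEACTIVATE_BAT = convert("""...""")` string in virtualenv.py is a support file (distribute_setup.py, deactivate.bat, ...) that has been zlib-compressed and base64-encoded so it can ship inside a single Python string literal; the diff swaps the old payloads for ones built from the newer distribute-0.6.34 / pip-1.3.1 archives. A minimal sketch of the decode step, assuming the conventional virtualenv.py `convert` helper (names and the trailing usage line are illustrative, not part of this changeset):

# Sketch of how virtualenv.py-style bootstrap scripts unpack the
# embedded payloads seen in the hunks above.
import base64
import zlib

def convert(s):
    # base64 text -> zlib-compressed bytes -> original file contents
    return zlib.decompress(base64.b64decode(s)).decode("utf-8")

# e.g. DEACTIVATE_BAT = convert("""eJxzSE3O...""") yields the plain-text
# deactivate.bat that virtualenv later writes into a new environment.
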