62,900
def stream_buckets(self, bucket_type=None, timeout=None):
    if not riak.disable_list_exceptions:
        raise ListError()
    _validate_timeout(timeout)
    if bucket_type:
        bucketfn = self._bucket_type_bucket_builder
    else:
        bucketfn = self._default_type_bucket_builder

    def make_op(transport):
        return transport.stream_buckets(
            bucket_type=bucket_type, timeout=timeout)

    for bucket_list in self._stream_with_retry(make_op):
        bucket_list = [bucketfn(bytes_to_str(name), bucket_type)
                       for name in bucket_list]
        if len(bucket_list) > 0:
            yield bucket_list

Streams the list of buckets. This is a generator method that should be iterated over.
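A minimal usage sketch, assuming a reachable local node; the host and port are illustrative, and list operations must be explicitly opted into because bucket listing is expensive in production:

import riak

riak.disable_list_exceptions = True  # opt in to expensive list operations
client = riak.RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
for bucket_list in client.stream_buckets():
    for bucket in bucket_list:
        print(bucket.name)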
62,901
def stream_index(self, bucket, index, startkey, endkey=None,
                 return_terms=None, max_results=None, continuation=None,
                 timeout=None, term_regex=None):
    _validate_timeout(timeout, infinity_ok=True)
    page = IndexPage(self, bucket, index, startkey, endkey,
                     return_terms, max_results, term_regex)
    page.stream = True
    resource = self._acquire()
    transport = resource.object
    page.results = transport.stream_index(
        bucket, index, startkey, endkey, return_terms=return_terms,
        max_results=max_results, continuation=continuation,
        timeout=timeout, term_regex=term_regex)
    page.results.attach(resource)
    return page

Queries a secondary index, streaming matching keys through an iterator.
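A hedged usage sketch; the bucket and index names are made up, and each iteration yields matching keys (or term/key pairs when return_terms is set). The client is the one constructed in the stream_buckets sketch above:

bucket = client.bucket('users')
page = client.stream_index(bucket, 'email_bin',
                           'a@example.com', 'z@example.com')
for result in page:
    print(result)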
62,902
def stream_keys(self, bucket, timeout=None):
    if not riak.disable_list_exceptions:
        raise ListError()
    _validate_timeout(timeout)

    def make_op(transport):
        return transport.stream_keys(bucket, timeout=timeout)

    for keylist in self._stream_with_retry(make_op):
        if len(keylist) > 0:
            if six.PY2:
                yield keylist
            else:
                yield [bytes_to_str(item) for item in keylist]

Lists all keys in a bucket via a stream. This is a generator method which should be iterated over.
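Usage mirrors stream_buckets; this sketch assumes the same illustrative client and an existing bucket:

riak.disable_list_exceptions = True
bucket = client.bucket('logs')
for keylist in client.stream_keys(bucket):
    for key in keylist:
        print(key)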
62,903
def ts_stream_keys(self, table, timeout=None):
    if not riak.disable_list_exceptions:
        raise ListError()
    t = table
    if isinstance(t, six.string_types):
        t = Table(self, table)
    _validate_timeout(timeout)
    resource = self._acquire()
    transport = resource.object
    stream = transport.ts_stream_keys(t, timeout)
    stream.attach(resource)
    try:
        for keylist in stream:
            if len(keylist) > 0:
                yield keylist
    finally:
        stream.close()

Lists all keys in a time series table via a stream. This is a generator method which should be iterated over.
62,904
def multiget(self, pairs, **params):
    if self._multiget_pool:
        params['pool'] = self._multiget_pool
    return riak.client.multi.multiget(self, pairs, **params)

Fetches many keys in parallel via threads.
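A small sketch of the expected call shape; pairs are (bucket_type, bucket, key) tuples, and the names here are hypothetical:

pairs = [('default', 'users', 'alice'),
         ('default', 'users', 'bob')]
for robj in client.multiget(pairs):
    # each result is a fetched RiakObject on success
    print(robj.key, robj.data)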
62,905
def multiput(self, objs, **params):
    if self._multiput_pool:
        params['pool'] = self._multiput_pool
    return riak.client.multi.multiput(self, objs, **params)

Stores objects in parallel via threads.
62,906
def fetch_datatype(self, bucket, key, r=None, pr=None, basic_quorum=None,
                   notfound_ok=None, timeout=None, include_context=None):
    dtype, value, context = self._fetch_datatype(
        bucket, key, r=r, pr=pr, basic_quorum=basic_quorum,
        notfound_ok=notfound_ok, timeout=timeout,
        include_context=include_context)
    return TYPES[dtype](bucket=bucket, key=key, value=value,
                        context=context)

Fetches the value of a Riak Datatype.
62,907
def update_datatype(self, datatype, w=None, dw=None, pw=None,
                    return_body=None, timeout=None, include_context=None):
    _validate_timeout(timeout)
    with self._transport() as transport:
        return transport.update_datatype(
            datatype, w=w, dw=dw, pw=pw, return_body=return_body,
            timeout=timeout, include_context=include_context)

Sends an update for a Riak Datatype to the server. This operation is not idempotent, so it will not be retried automatically.
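In practice this is usually invoked indirectly through a datatype's store() method. A sketch, assuming a bucket type named 'counters' that is configured for counter datatypes:

bucket = client.bucket_type('counters').bucket('page_hits')
counter = client.fetch_datatype(bucket, 'homepage')
counter.increment(1)
counter.store()  # sends the pending increment via update_datatype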
62,908
def _non_connect_send_recv(self, msg_code, data=None):
    self._non_connect_send_msg(msg_code, data)
    return self._recv_msg()

Similar to self._send_recv, but doesn't try to initiate a connection, thus preventing an infinite loop.
62,909
def _non_connect_send_msg(self, msg_code, data):
    try:
        self._socket.sendall(self._encode_msg(msg_code, data))
    except (IOError, socket.error) as e:
        if e.errno == errno.EPIPE:
            raise ConnectionClosed(e)
        else:
            raise

Similar to self._send, but doesn't try to initiate a connection, thus preventing an infinite loop.
62,910
def _init_security(self):
    if not self._starttls():
        raise SecurityError("Could not start TLS connection")
    self._ssl_handshake()
    if not self._auth():
        raise SecurityError("Could not authorize connection")

Initialize a secure connection to the server.
62,911
def _starttls(self):
    resp_code, _ = self._non_connect_send_recv(
        riak.pb.messages.MSG_CODE_START_TLS)
    return resp_code == riak.pb.messages.MSG_CODE_START_TLS

Exchanges a STARTTLS message with Riak to initiate secure communications; returns True if Riak responds with a STARTTLS response, False otherwise.
62,912
def close(self):
    if self._socket:
        if USE_STDLIB_SSL:
            try:
                self._socket.shutdown(socket.SHUT_RDWR)
            except EnvironmentError:
                logging.debug('Exception occurred while shutting '
                              'down socket.', exc_info=True)
        self._socket.close()
        del self._socket

Closes the underlying socket of the PB connection.
62,913
def content_property(name, doc=None):
    def _setter(self, value):
        if len(self.siblings) == 0:
            self.siblings = [RiakContent(self)]
        if len(self.siblings) != 1:
            raise ConflictError()
        setattr(self.siblings[0], name, value)

    def _getter(self):
        if len(self.siblings) == 0:
            return
        if len(self.siblings) != 1:
            raise ConflictError()
        return getattr(self.siblings[0], name)

    return property(_getter, _setter, doc=doc)

Delegates a property to the first sibling in a RiakObject, raising an error when the object is in conflict.
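An abbreviated sketch of how a class like RiakObject consumes this factory; the attribute names follow the pattern the library uses, but the class body here is illustrative:

class RiakObject(object):
    # each property reads/writes self.siblings[0], or raises
    # ConflictError when multiple siblings are present
    data = content_property('data', doc='The deserialized value')
    content_type = content_property('content_type')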
62,914
def content_method(name):
    def _delegate(self, *args, **kwargs):
        if len(self.siblings) != 1:
            raise ConflictError()
        return getattr(self.siblings[0], name).__call__(*args, **kwargs)

    _delegate.__doc__ = getattr(RiakContent, name).__doc__
    return _delegate

Delegates a method to the first sibling in a RiakObject, raising an error when the object is in conflict.
62,915
def store(self, w=None, dw=None, pw=None, return_body=True,
          if_none_match=False, timeout=None):
    if len(self.siblings) != 1:
        raise ConflictError("Attempting to store an invalid object, "
                            "resolve the siblings first")
    self.client.put(self, w=w, dw=dw, pw=pw,
                    return_body=return_body,
                    if_none_match=if_none_match,
                    timeout=timeout)
    return self

Store the object in Riak. When this operation completes, the object could contain new metadata and possibly new data if Riak contains a newer version of the object according to the object's vector clock.
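A hypothetical round trip, assuming a reachable node; store() raises ConflictError if siblings have not been resolved first:

bucket = client.bucket('users')
obj = bucket.new('alice', data={'name': 'Alice'})
obj.store()
obj.data['active'] = True
obj.store(w=2, return_body=False)  # quorum write, skip the returned body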
62,916
def reload(self, r=None, pr=None, timeout=None, basic_quorum=None,
           notfound_ok=None, head_only=False):
    self.client.get(self, r=r, pr=pr, timeout=timeout,
                    head_only=head_only)
    return self

Reload the object from Riak. When this operation completes, the object could contain new metadata and a new value if the object was updated in Riak since it was last retrieved.
62,917
def delete(self, r=None, w=None, dw=None, pr=None, pw=None,
           timeout=None):
    self.client.delete(self, r=r, w=w, dw=dw, pr=pr, pw=pw,
                       timeout=timeout)
    self.clear()
    return self

Delete this object from Riak.
62,918
def get_encoder(self, content_type):
    if content_type in self._encoders:
        return self._encoders[content_type]
    else:
        return self._client.get_encoder(content_type)

Get the encoding function for the provided content type for this bucket.
62,919
def get_decoder(self, content_type):
    if content_type in self._decoders:
        return self._decoders[content_type]
    else:
        return self._client.get_decoder(content_type)

Get the decoding function for the provided content type for this bucket.
62,920
def multiget(self, keys, r=None, pr=None, timeout=None,
             basic_quorum=None, notfound_ok=None, head_only=False):
    bkeys = [(self.bucket_type.name, self.name, key) for key in keys]
    return self._client.multiget(bkeys, r=r, pr=pr, timeout=timeout,
                                 basic_quorum=basic_quorum,
                                 notfound_ok=notfound_ok,
                                 head_only=head_only)

Retrieves a list of keys belonging to this bucket in parallel.
62,921
def stream_buckets(self, timeout=None):
    return self._client.stream_buckets(bucket_type=self, timeout=timeout)

Streams the list of buckets under this bucket type. This is a generator method that should be iterated over.
62,922
def incr(self, d):
    with self.lock:
        self.p = self.value() + d

Increases the value by the argument.
62,923
def make_random_client_id(self):
    if PY2:
        return ('py_%s' %
                base64.b64encode(str(random.randint(1, 0x40000000))))
    else:
        return ('py_%s' %
                base64.b64encode(bytes(str(random.randint(1, 0x40000000)),
                                       'ascii')))

Returns a random client identifier.
62,924
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None,
        notfound_ok=None, head_only=False):
    raise NotImplementedError

Fetches an object.
62,925
def put(self, robj, w=None, dw=None, pw=None, return_body=None,
        if_none_match=None, timeout=None):
    raise NotImplementedError

Stores an object.
62,926
def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None,
           pw=None, timeout=None):
    raise NotImplementedError

Deletes an object.
62,927
def update_counter(self, bucket, key, value, w=None, dw=None, pw=None,
                   returnvalue=False):
    raise NotImplementedError

Updates a counter by the given value.
62,928
def fetch_datatype(self, bucket, key, r=None, pr=None, basic_quorum=None,
                   notfound_ok=None, timeout=None, include_context=None):
    raise NotImplementedError

Fetches a Riak Datatype.
62,929
def update_datatype(self, datatype, w=None, dw=None, pw=None,
                    return_body=None, timeout=None, include_context=None):
    raise NotImplementedError

Updates a Riak Datatype by sending local operations to the server.
62,930
def _search_mapred_emu(self, index, query):
    phases = []
    if not self.phaseless_mapred():
        phases.append({'language': 'erlang',
                       'module': 'riak_kv_mapreduce',
                       'function': 'reduce_identity',
                       'keep': True})
    mr_result = self.mapred({'module': 'riak_search',
                             'function': 'mapred_search',
                             'arg': [index, query]},
                            phases)
    result = {'num_found': len(mr_result),
              'max_score': 0.0,
              'docs': []}
    for bucket, key, data in mr_result:
        if u'score' in data and data[u'score'][0] > result['max_score']:
            result['max_score'] = data[u'score'][0]
        result['docs'].append({u'id': key})
    return result

Emulates a search request via MapReduce. Used in the case where the transport supports MapReduce but has no native search capability.
62,931
def _get_index_mapred_emu(self, bucket, index, startkey, endkey=None):
    phases = []
    if not self.phaseless_mapred():
        phases.append({'language': 'erlang',
                       'module': 'riak_kv_mapreduce',
                       'function': 'reduce_identity',
                       'keep': True})
    if endkey:
        result = self.mapred({'bucket': bucket,
                              'index': index,
                              'start': startkey,
                              'end': endkey},
                             phases)
    else:
        result = self.mapred({'bucket': bucket,
                              'index': index,
                              'key': startkey},
                             phases)
    return [key for resultbucket, key in result]

Emulates a secondary index request via MapReduce. Used in the case where the transport supports MapReduce but has no native secondary index query capability.
62,932
def _parse_body(self, robj, response, expected_statuses):
    if response is None:
        return None
    status, headers, data = response
    if not status:
        m = 'Could not contact Riak Server: http://{0}:{1}!'.format(
            self._node.host, self._node.http_port)
        raise RiakError(m)
    self.check_http_code(status, expected_statuses)
    if 'x-riak-vclock' in headers:
        robj.vclock = VClock(headers['x-riak-vclock'], 'base64')
    if status == 404:
        robj.siblings = []
        return None
    elif status == 201:
        robj.key = headers['location'].strip().split('/')[-1]
    elif status == 300:
        ctype, params = parse_header(headers['content-type'])
        if ctype == 'multipart/mixed':
            if six.PY3:
                data = bytes_to_str(data)
            boundary = re.compile('\r?\n--%s(?:--)?\r?\n' %
                                  re.escape(params['boundary']))
            parts = [message_from_string(p)
                     for p in re.split(boundary, data)[1:-1]]
            robj.siblings = [self._parse_sibling(RiakContent(robj),
                                                 part.items(),
                                                 part.get_payload())
                             for part in parts]
            if robj.resolver is not None:
                robj.resolver(robj)
            return robj
        else:
            raise Exception('unexpected sibling response format: '
                            '{0}'.format(ctype))
    robj.siblings = [self._parse_sibling(RiakContent(robj),
                                         headers.items(),
                                         data)]
    return robj

Parse the body of an object response and populate the object.
62,933
def _parse_sibling(self, sibling, headers, data):
    sibling.exists = True
    for header, value in headers:
        header = header.lower()
        if header == 'content-type':
            sibling.content_type, sibling.charset = \
                self._parse_content_type(value)
        elif header == 'etag':
            sibling.etag = value
        elif header == 'link':
            sibling.links = self._parse_links(value)
        elif header == 'last-modified':
            sibling.last_modified = mktime_tz(parsedate_tz(value))
        elif header.startswith('x-riak-meta-'):
            metakey = header.replace('x-riak-meta-', '')
            sibling.usermeta[metakey] = value
        elif header.startswith('x-riak-index-'):
            field = header.replace('x-riak-index-', '')
            reader = csv.reader([value], skipinitialspace=True)
            for line in reader:
                for token in line:
                    token = decode_index_value(field, token)
                    sibling.add_index(field, token)
        elif header == 'x-riak-deleted':
            sibling.exists = False
    sibling.encoded_data = data
    return sibling

Parses a single sibling out of a response.
62,934
def _to_link_header(self, link):
    try:
        bucket, key, tag = link
    except ValueError:
        raise RiakError("Invalid link tuple %s" % link)
    tag = tag if tag is not None else bucket
    url = self.object_path(bucket, key)
    header = '<%s>; riaktag="%s"' % (url, tag)
    return header

Convert the link tuple to a link header string. Used internally.
62,935
def _normalize_json_search_response(self, json):
    result = {}
    if 'facet_counts' in json:
        result['facet_counts'] = json[u'facet_counts']
    if 'grouped' in json:
        result['grouped'] = json[u'grouped']
    if 'stats' in json:
        result['stats'] = json[u'stats']
    if u'response' in json:
        result['num_found'] = json[u'response'][u'numFound']
        result['max_score'] = float(json[u'response'][u'maxScore'])
        docs = []
        for doc in json[u'response'][u'docs']:
            resdoc = {}
            if u'_yz_rk' in doc:
                resdoc = doc
            else:
                resdoc[u'id'] = doc[u'id']
                if u'fields' in doc:
                    for k, v in six.iteritems(doc[u'fields']):
                        resdoc[k] = v
            docs.append(resdoc)
        result['docs'] = docs
    return result

Normalizes a JSON search response so that PB and HTTP have the same return value.
62,936
def _normalize_xml_search_response(self, xml):
    target = XMLSearchResult()
    parser = ElementTree.XMLParser(target=target)
    parser.feed(xml)
    return parser.close()

Normalizes an XML search response so that PB and HTTP have the same return value.
62,937
def connect(self):
    HTTPConnection.connect(self)
    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

Sets TCP_NODELAY on the socket.
62,938
def _with_retries(self, pool, fn):
    skip_nodes = []

    def _skip_bad_nodes(transport):
        return transport._node not in skip_nodes

    retry_count = self.retries - 1
    first_try = True
    current_try = 0
    while True:
        try:
            with pool.transaction(_filter=_skip_bad_nodes,
                                  yield_resource=True) as resource:
                transport = resource.object
                try:
                    return fn(transport)
                except (IOError, HTTPException, ConnectionClosed) as e:
                    resource.errored = True
                    if _is_retryable(e):
                        transport._node.error_rate.incr(1)
                        skip_nodes.append(transport._node)
                        if first_try:
                            continue
                        else:
                            raise BadResource(e)
                    else:
                        raise
        except BadResource as e:
            if current_try < retry_count:
                resource.errored = True
                current_try += 1
                continue
            else:
                raise e.args[0]
        finally:
            first_try = False

Performs the passed function with retries against the given pool.
62,939
def _choose_pool(self, protocol=None):
    if not protocol:
        protocol = self.protocol
    if protocol == 'http':
        pool = self._http_pool
    elif protocol == 'tcp' or protocol == 'pbc':
        pool = self._tcp_pool
    else:
        raise ValueError("invalid protocol %s" % protocol)
    if pool is None or self._closed:
        raise RuntimeError("Client is closed.")
    return pool

Selects a connection pool according to the default protocol and the passed one.
62,940
def default_encoder(obj):
    if isinstance(obj, bytes):
        return json.dumps(bytes_to_str(obj),
                          ensure_ascii=False).encode("utf-8")
    else:
        return json.dumps(obj, ensure_ascii=False).encode("utf-8")

Default encoder for JSON datatypes, which returns UTF-8 encoded JSON instead of the default bloated \uXXXX-escaped ASCII strings.
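A standalone comparison against the stdlib default, runnable with only the json module, showing why ensure_ascii=False avoids the escaped-ASCII bloat:

import json

s = u'h\u00e9llo'
print(json.dumps(s))                  # "h\u00e9llo"  (escaped ASCII)
print(json.dumps(s, ensure_ascii=False).encode('utf-8'))  # b'"h\xc3\xa9llo"'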
62,941
def close(self):
    if not self._closed:
        self._closed = True
        self._stop_multi_pools()
        if self._http_pool is not None:
            self._http_pool.clear()
            self._http_pool = None
        if self._tcp_pool is not None:
            self._tcp_pool.clear()
            self._tcp_pool = None

Iterate through all of the connections and close each one.
62,942
def _create_credentials(self, n):
    if not n:
        return n
    elif isinstance(n, SecurityCreds):
        return n
    elif isinstance(n, dict):
        return SecurityCreds(**n)
    else:
        raise TypeError("%s is not a valid security configuration"
                        % repr(n))

Create security credentials if necessary.
62,943
def _connect(self):
    timeout = None
    if self._options is not None and 'timeout' in self._options:
        timeout = self._options['timeout']
    if self._client._credentials:
        self._connection = self._connection_class(
            host=self._node.host,
            port=self._node.http_port,
            credentials=self._client._credentials,
            timeout=timeout)
    else:
        self._connection = self._connection_class(
            host=self._node.host,
            port=self._node.http_port,
            timeout=timeout)
    # Property access for its side effect: forces detection of the
    # server version before other requests are sent.
    self.server_version

Uses the appropriate connection class, optionally with security.
62,944
def _security_auth_headers(self, username, password, headers):
    userColonPassword = username + ":" + password
    b64UserColonPassword = base64.b64encode(
        str_to_bytes(userColonPassword)).decode("ascii")
    headers['Authorization'] = 'Basic %s' % b64UserColonPassword

Adds the requisite HTTP Authentication headers.
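The same construction using only the standard library, to show the header shape this helper produces; the credentials are dummies:

import base64

def basic_auth_header(username, password):
    # base64-encode "user:password" and prefix with the Basic scheme
    token = base64.b64encode(
        ('%s:%s' % (username, password)).encode('utf-8')).decode('ascii')
    return 'Basic %s' % token

print(basic_auth_header('riakuser', 'secret'))
# -> Basic cmlha3VzZXI6c2VjcmV0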
62,945
def query(self, query, interpolations=None):
    return self._client.ts_query(self, query, interpolations)

Queries a timeseries table.
62,946
def getConfigDirectory():
    if platform.system() == 'Windows':
        return os.path.join(os.environ['APPDATA'], 'ue4cli')
    else:
        return os.path.join(os.environ['HOME'], '.config', 'ue4cli')

Determines the platform-specific config directory location for ue4cli.
62,947
def setConfigKey(key, value):
    configFile = ConfigurationManager._configFile()
    return JsonDataManager(configFile).setKey(key, value)

Sets the config data value for the specified dictionary key.
62,948
def clearCache():
    if os.path.exists(CachedDataManager._cacheDir()):
        shutil.rmtree(CachedDataManager._cacheDir())

Clears any cached data we have stored about specific engine versions.
62,949
def getCachedDataKey(engineVersionHash, key):
    cacheFile = CachedDataManager._cacheFileForHash(engineVersionHash)
    return JsonDataManager(cacheFile).getKey(key)

Retrieves the cached data value for the specified engine version hash and dictionary key.
62,950
def setCachedDataKey(engineVersionHash, key, value):
    cacheFile = CachedDataManager._cacheFileForHash(engineVersionHash)
    return JsonDataManager(cacheFile).setKey(key, value)

Sets the cached data value for the specified engine version hash and dictionary key.
62,951
def writeFile(filename, data):
    with open(filename, 'wb') as f:
        f.write(data.encode('utf-8'))

Writes data to a file.
62,952
def patchFile(filename, replacements):
    patched = Utility.readFile(filename)
    for key in replacements:
        patched = patched.replace(key, replacements[key])
    Utility.writeFile(filename, patched)

Applies the supplied dictionary of replacements to a file.
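A hypothetical invocation; the file path and keys are made up, and replacements maps literal search strings to their substitutions:

Utility.patchFile('Config/DefaultEngine.ini', {
    'bUseUnityBuild=true': 'bUseUnityBuild=false',
})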
62,953
def escapePathForShell(path):
    if platform.system() == 'Windows':
        return '"{}"'.format(path.replace('"', '""'))
    else:
        return shellescape.quote(path)

Escapes a filesystem path for use as a command-line argument.
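Behavior differs by platform, as a quick sketch shows; the printed outputs below are illustrative:

print(escapePathForShell('C:\\Program Files\\Epic Games'))
# on Windows: "C:\Program Files\Epic Games"
print(escapePathForShell('/opt/unreal engine'))
# on POSIX (via shellescape.quote): '/opt/unreal engine'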
62,954
def join(delim, items, quotes=False):
    if quotes:
        transform = lambda s: s if ' ' not in s else '"{}"'.format(s)
    else:
        transform = lambda s: s
    stripped = [transform(i) for i in items if len(i) > 0]
    if len(stripped) > 0:
        return delim.join(stripped)
    return ''

Joins the supplied list of strings after removing any empty strings from the list, optionally quoting any strings that contain spaces.
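A quick demonstration of the empty-string filtering and the optional quoting:

print(join(' ', ['clang', '', '-O2']))           # clang -O2
print(join(' ', ['-I', 'My Dir'], quotes=True))  # -I "My Dir"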