fito.data_store package

Submodules

fito.data_store.a module

fito.data_store.base module

fito.data_store.base.AutosavedFunction[source]
AutosavedFunction fields:
args_specifications = KwargsField(default={})
cache_on = PrimitiveField()
method_type = PrimitiveField(0, default=None)
out_name = PrimitiveField(default=None)
out_type = PrimitiveField(default=<class 'fito.operations.operation.Operation'>)
class fito.data_store.base.BaseDataStore(*args, **kwargs)[source]

Bases: fito.operation_runner.OperationRunner

Base class for all data stores. To implement a backend you need to implement the _get, save and iteritems methods.

_get is the actual retrieval procedure; the caching strategy is part of the data store implementation.
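
A minimal backend sketch, assuming only the contract described above: _get, save and iteritems are overridden, get_key (documented below) is used to index an in-memory dict, and raising KeyError from _get for unknown specs is an assumption made so that get_or_none can return None.

    from fito.data_store.base import BaseDataStore


    class InMemoryDataStore(BaseDataStore):
        def __init__(self, *args, **kwargs):
            super(InMemoryDataStore, self).__init__(*args, **kwargs)
            self.data = {}  # maps get_key(spec) -> (spec, object)

        def _get(self, spec):
            # Assumed convention: raise KeyError when the spec was never saved
            return self.data[self.get_key(spec)][1]

        def save(self, spec, object):
            self.data[self.get_key(spec)] = (spec, object)

        def iteritems(self):
            # An iterator over (spec, object) pairs, as documented below
            for spec, obj in self.data.values():
                yield spec, obj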

autosave(*args, **kwargs)[source]
find_similar(spec)[source]
get(spec)[source]

Gets an operation from this data store.

get_cache_size = NumericField(default=0)
get_id(spec)[source]

Gets the internal id of a given spec; it should raise KeyError if the spec is not in self

classmethod get_key(spec)[source]
get_or_none(spec)[source]
interactive_rehash(spec)[source]
iteritems()[source]

Iterates over the data store.
Returns: an iterator over (operation, object) pairs.

iterkeys(raw=False)[source]

Iterates over the keys of the data store.
Parameters: raw - whether to return raw documents or specs.

refactor(refactor_operation, out_data_store, permissive=False)[source]
remove(spec)[source]

Removes a spec from the data store. Updates the get_cache if necessary

save(spec, object)[source]

Actual implementation that saves an object associated with the id or operation

verbose = PrimitiveField(default=False, serialize=False)
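
The listing above reads more easily next to a short usage sketch. This is a hedged example rather than documented usage: the Normalize spec is hypothetical, the import path for PrimitiveField is assumed, and DictDataStore (documented below) is assumed to be constructible without arguments because all of its fields have defaults.

    from fito.specs.base import Spec
    from fito.specs.fields import PrimitiveField  # import path assumed
    from fito.data_store.dict_ds import DictDataStore


    class Normalize(Spec):
        # Hypothetical spec; PrimitiveField(0) is assumed to declare the
        # first positional argument, mirroring the field listings above
        dataset = PrimitiveField(0)


    ds = DictDataStore()

    spec = Normalize('train.csv')
    ds.save(spec, {'mean': 0.5, 'std': 0.1})

    print(ds.get(spec))                           # {'mean': 0.5, 'std': 0.1}
    print(ds.get_or_none(Normalize('test.csv')))  # None, nothing stored for it

    for stored_spec, value in ds.iteritems():
        print(stored_spec, value)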

fito.data_store.dict_ds module

class fito.data_store.dict_ds.DictDataStore(*args, **kwargs)[source]

Bases: fito.data_store.base.BaseDataStore

DictDataStore fields:
execute_cache_size = NumericField(default=0)
force = PrimitiveField(default=False, serialize=False)
get_cache_size = NumericField(default=0)
verbose = PrimitiveField(default=False, serialize=False)
clean()[source]
get_id(spec)[source]
iteritems()[source]
iterkeys(raw=False)[source]
save(spec, object)[source]

fito.data_store.file module

class fito.data_store.file.FileDataStore(*args, **kwargs)[source]

Bases: fito.data_store.base.BaseDataStore

FileDataStore fields:
execute_cache_size = NumericField(default=0)
force = PrimitiveField(default=False, serialize=False)
get_cache_size = NumericField(default=0)
path = PrimitiveField(0)
serializer = SpecFieldForSerializer(default=None)
split_keys = PrimitiveField(default=True)
use_class_name = PrimitiveField(default=False)
verbose = PrimitiveField(default=False, serialize=False)
clean(cls=None)[source]
get_dir_for_saving(spec, create=True)[source]
get_id(spec)[source]
is_empty()[source]
iteritems()[source]
iterkeys(raw=False)[source]
path = PrimitiveField(0)
save(spec, obj)[source]
serializer = SpecFieldForSerializer(default=None)
split_keys = PrimitiveField(default=True)
use_class_name = PrimitiveField(default=False)
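
A hedged construction sketch: path is the field at position 0 and is assumed to be passed positionally, and PickleSerializer (documented next) is assumed to be a valid value for the serializer field. The directory is illustrative.

    from fito.data_store.file import FileDataStore, PickleSerializer

    ds = FileDataStore(
        '/tmp/fito_cache',              # path = PrimitiveField(0): where objects live on disk
        serializer=PickleSerializer(),  # how each object is written to its subdirectory
        split_keys=True,                # documented defaults, shown for readability
        use_class_name=False,
    )

    # FileDataStore exposes the same interface as every BaseDataStore:
    # ds.save(spec, obj), ds.get(spec), ds.iteritems(), ds.clean(), ...
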
class fito.data_store.file.PickleSerializer(*args, **kwargs)[source]

Bases: fito.data_store.file.SingleFileSerializer

PickleSerializer fields:

get_fname(subdir)[source]
load(subdir)[source]
save(obj, subdir)[source]
class fito.data_store.file.RawSerializer(*args, **kwargs)[source]

Bases: fito.data_store.file.SingleFileSerializer

RawSerializer fields:

get_fname(subdir)[source]
load(subdir)[source]
save(obj, subdir)[source]
class fito.data_store.file.Serializer(*args, **kwargs)[source]

Bases: fito.specs.base.Spec

Serializer fields:

exists(subdir)[source]
load(subdir)[source]
save(obj, subdir)[source]
class fito.data_store.file.SingleFileSerializer(*args, **kwargs)[source]

Bases: fito.data_store.file.Serializer

SingleFileSerializer fields:

exists(subdir)[source]
get_fname(subdir)[source]
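
Following the pattern suggested by PickleSerializer and RawSerializer, a custom serializer can subclass SingleFileSerializer. This is a sketch under assumptions: get_fname is assumed to return the full path of the single file inside subdir (and to be what exists checks), and the JSON format is purely illustrative.

    import json
    import os

    from fito.data_store.file import SingleFileSerializer


    class JsonSerializer(SingleFileSerializer):
        def get_fname(self, subdir):
            # Assumed contract: the one file this serializer reads and writes
            return os.path.join(subdir, 'obj.json')

        def save(self, obj, subdir):
            with open(self.get_fname(subdir), 'w') as f:
                json.dump(obj, f)

        def load(self, subdir):
            with open(self.get_fname(subdir)) as f:
                return json.load(f)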

fito.data_store.mongo module

class fito.data_store.mongo.MongoHashMap(*args, **kwargs)[source]

Bases: fito.data_store.base.BaseDataStore

MongoDB-based key-value store

add_incremental_id = PrimitiveField(default=False)
choice(n=1, rnd=None)[source]
clean()[source]
coll = PrimitiveField(0)
create_indices()[source]
get_collections()[source]
get_id(spec)[source]
iteritems()[source]
iterkeys(raw=False)[source]
save(spec, values)[source]
to_dict(include_all=False)[source]
use_gridfs = PrimitiveField(default=False)
fito.data_store.mongo.get_collection(client, name)[source]
fito.data_store.mongo.random() → x in the interval [0, 1).
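
A hedged wiring sketch for MongoHashMap: coll is the field at position 0 and is assumed to accept a pymongo collection; the database and collection names are illustrative, and use_gridfs / add_incremental_id are shown with their documented defaults.

    from pymongo import MongoClient

    from fito.data_store.mongo import MongoHashMap

    client = MongoClient('localhost', 27017)

    ds = MongoHashMap(
        client.fito_example.cache,  # coll = PrimitiveField(0); a pymongo collection is assumed here
        use_gridfs=False,           # documented default; GridFS is presumably for large payloads
        add_incremental_id=False,   # documented default
    )

    ds.create_indices()  # documented helper; anything beyond index creation is assumed

    # Same BaseDataStore interface as the other backends:
    # ds.save(spec, values), ds.get(spec), ds.iteritems(), ds.choice(n=1), ...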

fito.data_store.pandas_ds module

fito.data_store.query module

fito.data_store.refactor module

class fito.data_store.refactor.AddField(*args, **kwargs)[source]

Bases: fito.data_store.refactor.FilteredStorageRefactor

AddField fields:
doc = UnboundPrimitiveField(0, serialize=False)
out_data_store = BaseSpecField(default=None, serialize=False)
spec_type = PrimitiveField(0)
storage_refactor = BaseSpecField(default=None)
field_name = PrimitiveField(1)
default_value = PrimitiveField(2)
default_value = PrimitiveField(2)
field_name = PrimitiveField(1)
transformation(doc)[source]
class fito.data_store.refactor.ChainedRefactor(*args, **kwargs)[source]

Bases: fito.data_store.refactor.StorageRefactor

ChainedRefactor fields:
doc = UnboundPrimitiveField(0, serialize=False)
out_data_store = BaseSpecField(default=None, serialize=False)
refactors = ArgsField(default=())
storage_refactor = BaseSpecField(default=None)
refactors = ArgsField(default=())
transformation(doc)[source]
class fito.data_store.refactor.ChangeField(*args, **kwargs)[source]

Bases: fito.data_store.refactor.FilteredStorageRefactor

ChangeField fields:
doc = UnboundPrimitiveField(0, serialize=False)
out_data_store = BaseSpecField(default=None, serialize=False)
spec_type = PrimitiveField(0)
storage_refactor = BaseSpecField(default=None)
field_name = PrimitiveField(1)
old_value = PrimitiveField(2)
new_value = PrimitiveField(3)
field_name = PrimitiveField(1)
matches(doc)[source]
new_value = PrimitiveField(3)
old_value = PrimitiveField(2)
recurse_first
transformation(doc)[source]
class fito.data_store.refactor.FilteredStorageRefactor(*args, **kwargs)[source]

Bases: fito.data_store.refactor.StorageRefactor

FilteredStorageRefactor fields:
doc = UnboundPrimitiveField(0, serialize=False)
out_data_store = BaseSpecField(default=None, serialize=False)
spec_type = PrimitiveField(0)
storage_refactor = BaseSpecField(default=None)
get_spec_type_string()[source]
matches(doc)[source]
spec_type = PrimitiveField(0)
class fito.data_store.refactor.ProjectedRefactor(*args, **kwargs)[source]

Bases: fito.data_store.refactor.StorageRefactor

This class handles a different semantic for the storage_refactor field: it only propagates on doc[field]

chain_transformations(doc)[source]
field = PrimitiveField(0)
transformation(doc)[source]
class fito.data_store.refactor.RemoveField(*args, **kwargs)[source]

Bases: fito.data_store.refactor.FilteredStorageRefactor

RemoveField fields:
doc = UnboundPrimitiveField(0, serialize=False)
out_data_store = BaseSpecField(default=None, serialize=False)
spec_type = PrimitiveField(0)
storage_refactor = BaseSpecField(default=None)
field_name = PrimitiveField(1)
field_name = PrimitiveField(1)
transformation(doc)[source]
class fito.data_store.refactor.RenameField(*args, **kwargs)[source]

Bases: fito.data_store.refactor.FilteredStorageRefactor

RenameField fields:
doc = UnboundPrimitiveField(0, serialize=False)
out_data_store = BaseSpecField(default=None, serialize=False)
spec_type = PrimitiveField(0)
storage_refactor = BaseSpecField(default=None)
source = PrimitiveField(1)
target = PrimitiveField(2)
source = PrimitiveField(1)
target = PrimitiveField(2)
transformation(doc)[source]
class fito.data_store.refactor.StorageRefactor(*args, **kwargs)[source]

Bases: fito.operations.operation.Operation

StorageRefactor fields:
doc = UnboundPrimitiveField(0, serialize=False)
out_data_store = BaseSpecField(default=None, serialize=False)
storage_refactor = BaseSpecField(default=None)
add_field(spec_type, field_name, default_value=None)[source]
apply(runner)[source]
bind(*args, **kwargs)[source]
chain_refactor(refactor)[source]
chain_transformations(doc)[source]
change_field(spec_type, field_name, old_value, new_value)[source]
change_type(spec_type, new_type)[source]
doc = UnboundPrimitiveField(0, serialize=False)
empty
inplace_bind(*args, **kwargs)[source]
project(field)[source]
recurse_first
remove_field(spec_type, field_name)[source]
rename_field(spec_type, source, target)[source]
storage_refactor = BaseSpecField(default=None)
transformation(doc)[source]
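
The refactor classes above are easiest to read alongside a small migration sketch. This is hedged: whether spec_type is given as the Spec subclass or as its name string is an assumption (get_spec_type_string suggests it gets normalized), the chaining behaviour of the helper methods is inferred from ChainedRefactor and chain_refactor, and the spec and field names are hypothetical.

    from fito.data_store.dict_ds import DictDataStore
    from fito.data_store.refactor import StorageRefactor

    source_ds = DictDataStore()  # store written with the old schema
    out_ds = DictDataStore()     # destination for the migrated documents

    # Each helper mirrors one of the classes above:
    #   rename_field -> RenameField(spec_type, source, target)
    #   add_field    -> AddField(spec_type, field_name, default_value)
    #   remove_field -> RemoveField(spec_type, field_name)
    refactor = (
        StorageRefactor()
        .rename_field('Normalize', 'path', 'dataset')
        .add_field('Normalize', 'version', default_value=1)
        .remove_field('Normalize', 'legacy_flag')
    )

    # BaseDataStore.refactor(refactor_operation, out_data_store, permissive=False)
    # applies the transformation to every stored document
    source_ds.refactor(refactor, out_ds)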

fito.data_store.store_manager module

class fito.data_store.store_manager.StorageManager(get_cache_size=0, execute_cache_size=0)[source]

Bases: fito.data_store.base.BaseDataStore

StorageManager fields:
execute_cache_size = NumericField(default=0)
force = PrimitiveField(default=False, serialize=False)
get_cache_size = NumericField(default=0)
verbose = PrimitiveField(default=False, serialize=False)
iteritems()[source]
save(operation, value)[source]
set_input(cls, ds)[source]
set_input_output(cls, ds, autosave=False)[source]
set_output(cls, ds, autosave=False)[source]
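
A heavily hedged sketch of the registration helpers above: MyOperation stands in for an Operation subclass, and the input/output semantics (which store feeds the operation, where its results are written, and what autosave persists) are inferred from the method names rather than spelled out in this listing.

    from fito.operations.operation import Operation
    from fito.data_store.dict_ds import DictDataStore
    from fito.data_store.file import FileDataStore
    from fito.data_store.store_manager import StorageManager


    class MyOperation(Operation):
        # Hypothetical operation, for illustration only
        pass


    manager = StorageManager()

    input_ds = DictDataStore()
    output_ds = FileDataStore('/tmp/fito_outputs')

    manager.set_input(MyOperation, input_ds)
    manager.set_output(MyOperation, output_ds, autosave=True)

    # set_input_output(cls, ds, autosave=False) is assumed to be the shortcut
    # for registering the same store on both sides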

Module contents