class documentation

class FeatureViewStore: (source)

View In Hierarchy

Undocumented

Method all Undocumented
Method all_columns Undocumented
Async Method batch_write Takes a set of features, computes the derived features, and stores them in the source
Method between_dates Undocumented
Method features_for Undocumented
Method filter Undocumented
Async Method freshness Undocumented
Async Method insert Undocumented
Async Method overwrite Undocumented
Method previous Undocumented
Method process_input Undocumented
Method select Undocumented
Method select_columns Undocumented
Async Method upsert Undocumented
Method using_source Sets the source to load features from.
Method with_optimised_write Undocumented
Async Method write Undocumented
Class Variable event_triggers Undocumented
Class Variable feature_filter Undocumented
Class Variable store Undocumented
Class Variable view Undocumented
Property location Undocumented
Property name Undocumented
Property request Undocumented
Property source Undocumented
Property write_input Undocumented
def all(self, limit: int | None = None) -> RetrievalJob: (source)

Undocumented

def all_columns(self, limit: int | None = None) -> RetrievalJob: (source)

Undocumented

async def batch_write(self, values: ConvertableToRetrievalJob | RetrievalJob): (source)

Takes a set of features, computes the derived features, and stores them in the source.

Args:
    values (ConvertableToRetrievalJob | RetrievalJob): The features to write

Raises:
    ValueError: If the provided features are invalid
def between_dates(self, start_date: datetime, end_date: datetime) -> RetrievalJob: (source)

Undocumented

def features_for(self, entities: ConvertableToRetrievalJob | RetrievalJob, event_timestamp_column: str | None = None) -> RetrievalJob: (source)

Undocumented

def filter(self, filter: pl.Expr | str) -> RetrievalJob: (source)

Undocumented

async def freshness(self) -> datetime | None: (source)

Undocumented

async def insert(self, values: RetrievalJob | ConvertableToRetrievalJob): (source)

Undocumented

async def overwrite(self, values: RetrievalJob | ConvertableToRetrievalJob): (source)

Undocumented

def previous(self, days: int = 0, minutes: int = 0, seconds: int = 0) -> RetrievalJob: (source)

Undocumented

def process_input(self, values: ConvertableToRetrievalJob) -> RetrievalJob: (source)

Undocumented

def select(self, features: Iterable[str]) -> FeatureViewStore: (source)

Undocumented

def select_columns(self, columns: list[str], limit: int | None = None) -> RetrievalJob: (source)

Undocumented

async def upsert(self, values: RetrievalJob | ConvertableToRetrievalJob): (source)

Undocumented

def using_source(self, source: BatchDataSource) -> FeatureViewStore: (source)

Sets the source to load features from.

```python
custom_source = PostgreSQLConfig.localhost("test")

store = FeatureView.from_dir(".")

features = await (
    store.feature_view("titanic")
    .using_source(custom_source)
    .all()
)
```
Args:
    source (BatchDataSource): The source to use

Returns:
    FeatureViewStore: A new FeatureViewStore that sends queries to the passed source
def with_optimised_write(self) -> FeatureViewStore: (source)

Undocumented

async def write(self, values: ConvertableToRetrievalJob): (source)

Undocumented

event_triggers: set[EventTrigger] = (source)

Undocumented

feature_filter: set[str] | None = (source)

Undocumented

store: FeatureStore = (source)

Undocumented

view: CompiledFeatureView = (source)

Undocumented

@property
location: FeatureLocation = (source)

Undocumented

@property
name: str = (source)

Undocumented

@property
request: RetrievalRequest = (source)

Undocumented

@property
write_input: set[str] = (source)

Undocumented