Skip to main content
Version: 1.5.0

SparkFilesystemDatasource

Signature

class great_expectations.datasource.fluent.SparkFilesystemDatasource(
*,
type: Literal['spark_filesystem'] = 'spark_filesystem',
name: str,
id: Optional[uuid.UUID] = None,
assets: List[Union[great_expectations.datasource.fluent.data_asset.path.spark.csv_asset.CSVAsset,
great_expectations.datasource.fluent.data_asset.path.spark.csv_asset.DirectoryCSVAsset,
great_expectations.datasource.fluent.data_asset.path.spark.parquet_asset.ParquetAsset,
great_expectations.datasource.fluent.data_asset.path.spark.parquet_asset.DirectoryParquetAsset,
great_expectations.datasource.fluent.data_asset.path.spark.orc_asset.ORCAsset,
great_expectations.datasource.fluent.data_asset.path.spark.orc_asset.DirectoryORCAsset,
great_expectations.datasource.fluent.data_asset.path.spark.json_asset.JSONAsset,
great_expectations.datasource.fluent.data_asset.path.spark.json_asset.DirectoryJSONAsset,
great_expectations.datasource.fluent.data_asset.path.spark.text_asset.TextAsset,
great_expectations.datasource.fluent.data_asset.path.spark.text_asset.DirectoryTextAsset,
great_expectations.datasource.fluent.data_asset.path.spark.delta_asset.DeltaAsset,
great_expectations.datasource.fluent.data_asset.path.spark.delta_asset.DirectoryDeltaAsset]] = [],
spark_config: Optional[Dict[pydantic.v1.types.StrictStr,
Union[pydantic.v1.types.StrictStr,
pydantic.v1.types.StrictInt,
pydantic.v1.types.StrictFloat,
pydantic.v1.types.StrictBool]]] = None,
force_reuse_spark_context: bool = True,
persist: bool = True,
base_directory: pathlib.Path,
data_context_root_directory: Optional[pathlib.Path] = None
)

SparkFilesystemDatasource is a subclass of SparkDatasource which connects to the filesystem.

Methods

add_csv_asset

Signature

add_csv_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType,
str]] = None,
sep: typing.Optional[str] = None,
encoding: typing.Optional[str] = None,
quote: typing.Optional[str] = None,
escape: typing.Optional[str] = None,
comment: typing.Optional[str] = None,
header: typing.Optional[typing.Union[bool,
str]] = None,
inferSchema: typing.Optional[typing.Union[bool,
str]] = None,
ignoreLeadingWhiteSpace: typing.Optional[typing.Union[bool,
str]] = None,
ignoreTrailingWhiteSpace: typing.Optional[typing.Union[bool,
str]] = None,
nullValue: typing.Optional[str] = None,
nanValue: typing.Optional[str] = None,
positiveInf: typing.Optional[str] = None,
negativeInf: typing.Optional[str] = None,
dateFormat: typing.Optional[str] = None,
timestampFormat: typing.Optional[str] = None,
maxColumns: typing.Optional[typing.Union[int,
str]] = None,
maxCharsPerColumn: typing.Optional[typing.Union[int,
str]] = None,
maxMalformedLogPerPartition: typing.Optional[typing.Union[int,
str]] = None,
mode: typing.Optional[typing.Literal['PERMISSIVE',
'DROPMALFORMED',
'FAILFAST']] = None,
columnNameOfCorruptRecord: typing.Optional[str] = None,
multiLine: typing.Optional[typing.Union[bool,
str]] = None,
charToEscapeQuoteEscaping: typing.Optional[str] = None,
samplingRatio: typing.Optional[typing.Union[float,
str]] = None,
enforceSchema: typing.Optional[typing.Union[bool,
str]] = None,
emptyValue: typing.Optional[str] = None,
locale: typing.Optional[str] = None,
lineSep: typing.Optional[str] = None,
unescapedQuoteHandling: typing.Optional[typing.Literal['STOP_AT_CLOSING_QUOTE',
'BACK_TO_DELIMITER',
'STOP_AT_DELIMITER',
'SKIP_VALUE',
'RAISE_ERROR']] = None
) → pydantic.BaseModel

Add a csv asset to the datasource.

add_delta_asset

Signature

add_delta_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
timestampAsOf: typing.Optional[str] = None,
versionAsOf: typing.Optional[str] = None
) → pydantic.BaseModel

Add a delta asset to the datasource.

add_directory_csv_asset

Signature

add_directory_csv_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType,
str]] = None,
sep: typing.Optional[str] = None,
encoding: typing.Optional[str] = None,
quote: typing.Optional[str] = None,
escape: typing.Optional[str] = None,
comment: typing.Optional[str] = None,
header: typing.Optional[typing.Union[bool,
str]] = None,
inferSchema: typing.Optional[typing.Union[bool,
str]] = None,
ignoreLeadingWhiteSpace: typing.Optional[typing.Union[bool,
str]] = None,
ignoreTrailingWhiteSpace: typing.Optional[typing.Union[bool,
str]] = None,
nullValue: typing.Optional[str] = None,
nanValue: typing.Optional[str] = None,
positiveInf: typing.Optional[str] = None,
negativeInf: typing.Optional[str] = None,
dateFormat: typing.Optional[str] = None,
timestampFormat: typing.Optional[str] = None,
maxColumns: typing.Optional[typing.Union[int,
str]] = None,
maxCharsPerColumn: typing.Optional[typing.Union[int,
str]] = None,
maxMalformedLogPerPartition: typing.Optional[typing.Union[int,
str]] = None,
mode: typing.Optional[typing.Literal['PERMISSIVE',
'DROPMALFORMED',
'FAILFAST']] = None,
columnNameOfCorruptRecord: typing.Optional[str] = None,
multiLine: typing.Optional[typing.Union[bool,
str]] = None,
charToEscapeQuoteEscaping: typing.Optional[str] = None,
samplingRatio: typing.Optional[typing.Union[float,
str]] = None,
enforceSchema: typing.Optional[typing.Union[bool,
str]] = None,
emptyValue: typing.Optional[str] = None,
locale: typing.Optional[str] = None,
lineSep: typing.Optional[str] = None,
unescapedQuoteHandling: typing.Optional[typing.Literal['STOP_AT_CLOSING_QUOTE',
'BACK_TO_DELIMITER',
'STOP_AT_DELIMITER',
'SKIP_VALUE',
'RAISE_ERROR']] = None,
data_directory: pathlib.Path
) → pydantic.BaseModel

Add a directory_csv asset to the datasource.

add_directory_delta_asset

Signature

add_directory_delta_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
timestampAsOf: typing.Optional[str] = None,
versionAsOf: typing.Optional[str] = None,
data_directory: pathlib.Path
) → pydantic.BaseModel

Add a directory_delta asset to the datasource.

add_directory_json_asset

Signature

add_directory_json_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType,
str]] = None,
primitivesAsString: typing.Optional[typing.Union[bool,
str]] = None,
prefersDecimal: typing.Optional[typing.Union[bool,
str]] = None,
allowComments: typing.Optional[typing.Union[bool,
str]] = None,
allowUnquotedFieldNames: typing.Optional[typing.Union[bool,
str]] = None,
allowSingleQuotes: typing.Optional[typing.Union[bool,
str]] = None,
allowNumericLeadingZero: typing.Optional[typing.Union[bool,
str]] = None,
allowBackslashEscapingAnyCharacter: typing.Optional[typing.Union[bool,
str]] = None,
mode: typing.Optional[typing.Literal['PERMISSIVE',
'DROPMALFORMED',
'FAILFAST']] = None,
columnNameOfCorruptRecord: typing.Optional[str] = None,
dateFormat: typing.Optional[str] = None,
timestampFormat: typing.Optional[str] = None,
multiLine: typing.Optional[typing.Union[bool,
str]] = None,
allowUnquotedControlChars: typing.Optional[typing.Union[bool,
str]] = None,
lineSep: typing.Optional[str] = None,
samplingRatio: typing.Optional[typing.Union[float,
str]] = None,
dropFieldIfAllNull: typing.Optional[typing.Union[bool,
str]] = None,
encoding: typing.Optional[str] = None,
locale: typing.Optional[str] = None,
allowNonNumericNumbers: typing.Optional[typing.Union[bool,
str]] = None,
data_directory: pathlib.Path
) → pydantic.BaseModel

Add a directory_json asset to the datasource.

add_directory_orc_asset

Signature

add_directory_orc_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
mergeSchema: typing.Optional[typing.Union[bool,
str]] = False,
data_directory: pathlib.Path
) → pydantic.BaseModel

Add a directory_orc asset to the datasource.

add_directory_parquet_asset

Signature

add_directory_parquet_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
mergeSchema: typing.Optional[typing.Union[bool,
str]] = None,
datetimeRebaseMode: typing.Optional[typing.Literal['EXCEPTION',
'CORRECTED',
'LEGACY']] = None,
int96RebaseMode: typing.Optional[typing.Literal['EXCEPTION',
'CORRECTED',
'LEGACY']] = None,
data_directory: pathlib.Path
) → pydantic.BaseModel

Add a directory_parquet asset to the datasource.

add_directory_text_asset

Signature

add_directory_text_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
wholetext: bool = False,
lineSep: typing.Optional[str] = None,
data_directory: pathlib.Path
) → pydantic.BaseModel

Add a directory_text asset to the datasource.

add_json_asset

Signature

add_json_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType,
str]] = None,
primitivesAsString: typing.Optional[typing.Union[bool,
str]] = None,
prefersDecimal: typing.Optional[typing.Union[bool,
str]] = None,
allowComments: typing.Optional[typing.Union[bool,
str]] = None,
allowUnquotedFieldNames: typing.Optional[typing.Union[bool,
str]] = None,
allowSingleQuotes: typing.Optional[typing.Union[bool,
str]] = None,
allowNumericLeadingZero: typing.Optional[typing.Union[bool,
str]] = None,
allowBackslashEscapingAnyCharacter: typing.Optional[typing.Union[bool,
str]] = None,
mode: typing.Optional[typing.Literal['PERMISSIVE',
'DROPMALFORMED',
'FAILFAST']] = None,
columnNameOfCorruptRecord: typing.Optional[str] = None,
dateFormat: typing.Optional[str] = None,
timestampFormat: typing.Optional[str] = None,
multiLine: typing.Optional[typing.Union[bool,
str]] = None,
allowUnquotedControlChars: typing.Optional[typing.Union[bool,
str]] = None,
lineSep: typing.Optional[str] = None,
samplingRatio: typing.Optional[typing.Union[float,
str]] = None,
dropFieldIfAllNull: typing.Optional[typing.Union[bool,
str]] = None,
encoding: typing.Optional[str] = None,
locale: typing.Optional[str] = None,
allowNonNumericNumbers: typing.Optional[typing.Union[bool,
str]] = None
) → pydantic.BaseModel

Add a json asset to the datasource.

add_orc_asset

Signature

add_orc_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
mergeSchema: typing.Optional[typing.Union[bool,
str]] = False
) → pydantic.BaseModel

Add an orc asset to the datasource.

add_parquet_asset

Signature

add_parquet_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
mergeSchema: typing.Optional[typing.Union[bool,
str]] = None,
datetimeRebaseMode: typing.Optional[typing.Literal['EXCEPTION',
'CORRECTED',
'LEGACY']] = None,
int96RebaseMode: typing.Optional[typing.Literal['EXCEPTION',
'CORRECTED',
'LEGACY']] = None
) → pydantic.BaseModel

Add a parquet asset to the datasource.

add_text_asset

Signature

add_text_asset(
name: str,
*,
id: typing.Optional[uuid.UUID] = None,
order_by: typing.Optional[typing.List[Sorter]] = None,
batch_metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
batch_definitions: typing.Optional[typing.List[BatchDefinition]] = None,
connect_options: typing.Optional[typing.Mapping] = None,
pathGlobFilter: typing.Optional[typing.Union[bool,
str]] = None,
recursiveFileLookup: typing.Optional[typing.Union[bool,
str]] = None,
modifiedBefore: typing.Optional[typing.Union[bool,
str]] = None,
modifiedAfter: typing.Optional[typing.Union[bool,
str]] = None,
wholetext: bool = False,
lineSep: typing.Optional[str] = None
) → pydantic.BaseModel

Add a text asset to the datasource.

delete_asset

Signature

delete_asset(
name: str
) → None

Removes the DataAsset referred to by name from the internal list of available DataAsset objects.

Parameters

Name | Description

name

name of DataAsset to be deleted.

get_asset

Signature

get_asset(
name: str
) → great_expectations.datasource.fluent.interfaces._DataAssetT

Returns the DataAsset referred to by name.

Parameters

Name | Description

name

name of DataAsset sought.

Returns

Type | Description

great_expectations.datasource.fluent.interfaces._DataAssetT

The DataAsset with the given name, if it exists; otherwise, an exception is raised.