SparkS3Datasource
class great_expectations.datasource.fluent.SparkS3Datasource(*, type: Literal['spark_s3'] = 'spark_s3', name: str, id: Optional[uuid.UUID] = None, assets: List[Union[great_expectations.datasource.fluent.data_asset.path.spark.csv_asset.CSVAsset, great_expectations.datasource.fluent.data_asset.path.spark.csv_asset.DirectoryCSVAsset, great_expectations.datasource.fluent.data_asset.path.spark.parquet_asset.ParquetAsset, great_expectations.datasource.fluent.data_asset.path.spark.parquet_asset.DirectoryParquetAsset, great_expectations.datasource.fluent.data_asset.path.spark.orc_asset.ORCAsset, great_expectations.datasource.fluent.data_asset.path.spark.orc_asset.DirectoryORCAsset, great_expectations.datasource.fluent.data_asset.path.spark.json_asset.JSONAsset, great_expectations.datasource.fluent.data_asset.path.spark.json_asset.DirectoryJSONAsset, great_expectations.datasource.fluent.data_asset.path.spark.text_asset.TextAsset, great_expectations.datasource.fluent.data_asset.path.spark.text_asset.DirectoryTextAsset, great_expectations.datasource.fluent.data_asset.path.spark.delta_asset.DeltaAsset, great_expectations.datasource.fluent.data_asset.path.spark.delta_asset.DirectoryDeltaAsset]] = [], spark_config: Optional[Dict[pydantic.v1.types.StrictStr, Union[pydantic.v1.types.StrictStr, pydantic.v1.types.StrictInt, pydantic.v1.types.StrictFloat, pydantic.v1.types.StrictBool]]] = None, force_reuse_spark_context: bool = True, persist: bool = True, bucket: str, boto3_options: Dict[str, Union[great_expectations.datasource.fluent.config_str.ConfigStr, Any]] = {})
SparkS3Datasource is a subclass of SparkDatasource which connects to Amazon S3.
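A minimal sketch of creating this datasource through a Data Context, assuming the GX 1.x fluent API (context.data_sources.add_spark_s3); the bucket name and region are placeholders:

    import great_expectations as gx

    context = gx.get_context()

    # Hypothetical bucket and region; boto3_options accepts boto3 client options
    # (credentials, endpoint_url, region_name, ...).
    datasource = context.data_sources.add_spark_s3(
        name="my_spark_s3_datasource",
        bucket="my-data-bucket",
        boto3_options={"region_name": "us-east-1"},
    )

The add_*_asset methods below are then called on the returned datasource.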
add_csv_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType, str]] = None, sep: typing.Optional[str] = None, encoding: typing.Optional[str] = None, quote: typing.Optional[str] = None, escape: typing.Optional[str] = None, comment: typing.Optional[str] = None, header: typing.Optional[typing.Union[bool, str]] = None, inferSchema: typing.Optional[typing.Union[bool, str]] = None, ignoreLeadingWhiteSpace: typing.Optional[typing.Union[bool, str]] = None, ignoreTrailingWhiteSpace: typing.Optional[typing.Union[bool, str]] = None, nullValue: typing.Optional[str] = None, nanValue: typing.Optional[str] = None, positiveInf: typing.Optional[str] = None, negativeInf: typing.Optional[str] = None, dateFormat: typing.Optional[str] = None, timestampFormat: typing.Optional[str] = None, maxColumns: typing.Optional[typing.Union[int, str]] = None, maxCharsPerColumn: typing.Optional[typing.Union[int, str]] = None, maxMalformedLogPerPartition: typing.Optional[typing.Union[int, str]] = None, mode: typing.Optional[typing.Literal['PERMISSIVE', 'DROPMALFORMED', 'FAILFAST']] = None, columnNameOfCorruptRecord: typing.Optional[str] = None, multiLine: typing.Optional[typing.Union[bool, str]] = None, charToEscapeQuoteEscaping: typing.Optional[str] = None, samplingRatio: typing.Optional[typing.Union[float, str]] = None, enforceSchema: typing.Optional[typing.Union[bool, str]] = None, emptyValue: typing.Optional[str] = None, locale: typing.Optional[str] = None, lineSep: typing.Optional[str] = None, unescapedQuoteHandling: typing.Optional[typing.Literal['STOP_AT_CLOSING_QUOTE', 'BACK_TO_DELIMITER', 'STOP_AT_DELIMITER', 'SKIP_VALUE', 'RAISE_ERROR']] = None) → pydantic.BaseModel
Add a CSV asset to the datasource.
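For example, a CSV asset that treats the first row as a header and infers column types; the s3_prefix connect option and the key prefix shown are assumptions about your bucket layout (continuing the sketch above):

    csv_asset = datasource.add_csv_asset(
        name="taxi_csv",                               # hypothetical asset name
        header=True,                                   # first row holds column names
        inferSchema=True,                              # let Spark infer column types
        connect_options={"s3_prefix": "data/taxi/"},   # limit S3 listing to this prefix
    )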
add_delta_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, timestampAsOf: typing.Optional[str] = None, versionAsOf: typing.Optional[str] = None) → pydantic.BaseModel
Add a Delta asset to the datasource.
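A sketch of Delta time travel, pinning the asset to a specific table version via versionAsOf (the name and version are illustrative):

    delta_asset = datasource.add_delta_asset(
        name="orders_delta",
        versionAsOf="42",  # read the table as of Delta version 42
    )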
add_directory_csv_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType, str]] = None, sep: typing.Optional[str] = None, encoding: typing.Optional[str] = None, quote: typing.Optional[str] = None, escape: typing.Optional[str] = None, comment: typing.Optional[str] = None, header: typing.Optional[typing.Union[bool, str]] = None, inferSchema: typing.Optional[typing.Union[bool, str]] = None, ignoreLeadingWhiteSpace: typing.Optional[typing.Union[bool, str]] = None, ignoreTrailingWhiteSpace: typing.Optional[typing.Union[bool, str]] = None, nullValue: typing.Optional[str] = None, nanValue: typing.Optional[str] = None, positiveInf: typing.Optional[str] = None, negativeInf: typing.Optional[str] = None, dateFormat: typing.Optional[str] = None, timestampFormat: typing.Optional[str] = None, maxColumns: typing.Optional[typing.Union[int, str]] = None, maxCharsPerColumn: typing.Optional[typing.Union[int, str]] = None, maxMalformedLogPerPartition: typing.Optional[typing.Union[int, str]] = None, mode: typing.Optional[typing.Literal['PERMISSIVE', 'DROPMALFORMED', 'FAILFAST']] = None, columnNameOfCorruptRecord: typing.Optional[str] = None, multiLine: typing.Optional[typing.Union[bool, str]] = None, charToEscapeQuoteEscaping: typing.Optional[str] = None, samplingRatio: typing.Optional[typing.Union[float, str]] = None, enforceSchema: typing.Optional[typing.Union[bool, str]] = None, emptyValue: typing.Optional[str] = None, locale: typing.Optional[str] = None, lineSep: typing.Optional[str] = None, unescapedQuoteHandling: typing.Optional[typing.Literal['STOP_AT_CLOSING_QUOTE', 'BACK_TO_DELIMITER', 'STOP_AT_DELIMITER', 'SKIP_VALUE', 'RAISE_ERROR']] = None, data_directory: pathlib.Path) → pydantic.BaseModel
Add a directory_csv asset to the datasource. Directory assets read all matching files under data_directory into a single DataFrame, rather than treating each file as a separate batch.
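A sketch with a hypothetical prefix; data_directory is annotated as pathlib.Path, and the string here is coerced by pydantic:

    dir_csv_asset = datasource.add_directory_csv_asset(
        name="daily_exports",
        data_directory="data/exports/",  # S3 key prefix treated as a directory
        header=True,
    )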
add_directory_delta_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, timestampAsOf: typing.Optional[str] = None, versionAsOf: typing.Optional[str] = None, data_directory: pathlib.Path) → pydantic.BaseModel
Add a directory_delta asset to the datasource.
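Similarly for a Delta table stored under a prefix, optionally pinned to a point in time with timestampAsOf (values are illustrative):

    dir_delta_asset = datasource.add_directory_delta_asset(
        name="orders_delta_dir",
        data_directory="delta/orders/",
        timestampAsOf="2024-01-01",  # time-travel to the table state at this timestamp
    )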
add_directory_json_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType, str]] = None, primitivesAsString: typing.Optional[typing.Union[bool, str]] = None, prefersDecimal: typing.Optional[typing.Union[bool, str]] = None, allowComments: typing.Optional[typing.Union[bool, str]] = None, allowUnquotedFieldNames: typing.Optional[typing.Union[bool, str]] = None, allowSingleQuotes: typing.Optional[typing.Union[bool, str]] = None, allowNumericLeadingZero: typing.Optional[typing.Union[bool, str]] = None, allowBackslashEscapingAnyCharacter: typing.Optional[typing.Union[bool, str]] = None, mode: typing.Optional[typing.Literal['PERMISSIVE', 'DROPMALFORMED', 'FAILFAST']] = None, columnNameOfCorruptRecord: typing.Optional[str] = None, dateFormat: typing.Optional[str] = None, timestampFormat: typing.Optional[str] = None, multiLine: typing.Optional[typing.Union[bool, str]] = None, allowUnquotedControlChars: typing.Optional[typing.Union[bool, str]] = None, lineSep: typing.Optional[str] = None, samplingRatio: typing.Optional[typing.Union[float, str]] = None, dropFieldIfAllNull: typing.Optional[typing.Union[bool, str]] = None, encoding: typing.Optional[str] = None, locale: typing.Optional[str] = None, allowNonNumericNumbers: typing.Optional[typing.Union[bool, str]] = None, data_directory: pathlib.Path) → pydantic.BaseModel
Add a directory_json asset to the datasource.
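The schema parameter accepts either a StructType or a DDL-style string, as Spark's JSON reader does; the prefix and column layout below are assumptions:

    dir_json_asset = datasource.add_directory_json_asset(
        name="events_json",
        data_directory="logs/events/",
        schema="id INT, payload STRING, ts TIMESTAMP",  # DDL-style schema string
    )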
add_directory_orc_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, mergeSchema: typing.Optional[typing.Union[bool, str]] = False, data_directory: pathlib.Path) → pydantic.BaseModel
Add a directory_orc asset to the datasource.
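A sketch enabling schema merging across the ORC files in the directory (prefix is hypothetical):

    dir_orc_asset = datasource.add_directory_orc_asset(
        name="metrics_orc",
        data_directory="warehouse/metrics/",
        mergeSchema=True,  # reconcile differing schemas across files
    )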
add_directory_parquet_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, mergeSchema: typing.Optional[typing.Union[bool, str]] = None, datetimeRebaseMode: typing.Optional[typing.Literal['EXCEPTION', 'CORRECTED', 'LEGACY']] = None, int96RebaseMode: typing.Optional[typing.Literal['EXCEPTION', 'CORRECTED', 'LEGACY']] = None, data_directory: pathlib.Path) → pydantic.BaseModel
Add a directory_parquet asset to the datasource.
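For example, with the datetime rebase mode set explicitly so dates written under the legacy calendar do not raise on read (prefix is hypothetical):

    dir_parquet_asset = datasource.add_directory_parquet_asset(
        name="sales_parquet",
        data_directory="warehouse/sales/",
        datetimeRebaseMode="CORRECTED",  # read dates/timestamps without legacy rebasing
    )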
add_directory_text_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, wholetext: bool = False, lineSep: typing.Optional[str] = None, data_directory: pathlib.Path) → pydantic.BaseModel
Add a directory_text asset to the datasource.
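A sketch reading line-delimited text under a hypothetical prefix:

    dir_text_asset = datasource.add_directory_text_asset(
        name="raw_logs",
        data_directory="logs/raw/",
        lineSep="\n",  # one record per line (also the default when omitted)
    )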
add_json_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, schema: typing.Optional[typing.Union[great_expectations.datasource.fluent.serializable_types.pyspark.SerializableStructType, str]] = None, primitivesAsString: typing.Optional[typing.Union[bool, str]] = None, prefersDecimal: typing.Optional[typing.Union[bool, str]] = None, allowComments: typing.Optional[typing.Union[bool, str]] = None, allowUnquotedFieldNames: typing.Optional[typing.Union[bool, str]] = None, allowSingleQuotes: typing.Optional[typing.Union[bool, str]] = None, allowNumericLeadingZero: typing.Optional[typing.Union[bool, str]] = None, allowBackslashEscapingAnyCharacter: typing.Optional[typing.Union[bool, str]] = None, mode: typing.Optional[typing.Literal['PERMISSIVE', 'DROPMALFORMED', 'FAILFAST']] = None, columnNameOfCorruptRecord: typing.Optional[str] = None, dateFormat: typing.Optional[str] = None, timestampFormat: typing.Optional[str] = None, multiLine: typing.Optional[typing.Union[bool, str]] = None, allowUnquotedControlChars: typing.Optional[typing.Union[bool, str]] = None, lineSep: typing.Optional[str] = None, samplingRatio: typing.Optional[typing.Union[float, str]] = None, dropFieldIfAllNull: typing.Optional[typing.Union[bool, str]] = None, encoding: typing.Optional[str] = None, locale: typing.Optional[str] = None, allowNonNumericNumbers: typing.Optional[typing.Union[bool, str]] = None) → pydantic.BaseModel
Add a JSON asset to the datasource.
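For example, for files that each hold one JSON document spanning multiple lines (rather than the default line-delimited JSON); the asset name is illustrative:

    json_asset = datasource.add_json_asset(
        name="events",
        multiLine=True,  # parse each file as a single multi-line JSON document
    )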
add_orc_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, mergeSchema: typing.Optional[typing.Union[bool, str]] = False) → pydantic.BaseModel
Add an ORC asset to the datasource.
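A sketch scoping the asset to a key prefix via connect options (the s3_prefix key and prefix value are assumptions about your layout):

    orc_asset = datasource.add_orc_asset(
        name="metrics_orc_files",
        connect_options={"s3_prefix": "orc/metrics/"},
    )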
add_parquet_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, mergeSchema: typing.Optional[typing.Union[bool, str]] = None, datetimeRebaseMode: typing.Optional[typing.Literal['EXCEPTION', 'CORRECTED', 'LEGACY']] = None, int96RebaseMode: typing.Optional[typing.Literal['EXCEPTION', 'CORRECTED', 'LEGACY']] = None) → pydantic.BaseModel
Add a Parquet asset to the datasource.
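For example, handling INT96 timestamps produced by older Parquet writers (asset name is illustrative):

    parquet_asset = datasource.add_parquet_asset(
        name="trips_parquet",
        int96RebaseMode="CORRECTED",  # read INT96 timestamps without legacy rebasing
    )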
add_text_asset(name: str, *, id=None, order_by=None, batch_metadata=None, batch_definitions=None, connect_options=None, pathGlobFilter: typing.Optional[typing.Union[bool, str]] = None, recursiveFileLookup: typing.Optional[typing.Union[bool, str]] = None, modifiedBefore: typing.Optional[typing.Union[bool, str]] = None, modifiedAfter: typing.Optional[typing.Union[bool, str]] = None, wholetext: bool = False, lineSep: typing.Optional[str] = None) → pydantic.BaseModel
Add a text asset to the datasource.
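A sketch producing one row per file instead of one row per line (asset name is illustrative):

    text_asset = datasource.add_text_asset(
        name="readme_files",
        wholetext=True,  # each file becomes a single row
    )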
delete_asset(name: str) → None
Removes the DataAsset referred to by name from the Datasource's internal list of available DataAsset objects.
- Parameters
  name – name of the DataAsset to be deleted.
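For example, removing the CSV asset registered in the sketch above:

    datasource.delete_asset(name="taxi_csv")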
get_asset(name: str) → great_expectations.datasource.fluent.interfaces._DataAssetT
Returns the DataAsset referred to by name.
- Parameters
  name – name of the DataAsset sought.
- Returns
  _DataAssetT – the DataAsset object, if one with the given name exists; otherwise, an exception is raised.
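For example, retrieving the Parquet asset registered earlier (a missing name raises rather than returning None):

    asset = datasource.get_asset(name="trips_parquet")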