def save(): Unit = {
  if (source.toLowerCase(Locale.ROOT) == DDLUtils.HIVE_PROVIDER) {
    throw new AnalysisException("Hive data source can only be used with tables, you can not " +
      "write files of Hive data source directly.")
  }

  assertNotBucketed("save")

  // Given a provider name, look up the data source class definition.
  val cls = DataSource.lookupDataSource(source, df.sparkSession.sessionState.conf)
  if (classOf[DataSourceV2].isAssignableFrom(cls)) {
    // Note: this local `source` (the instantiated data source) shadows the
    // provider-name field `source` used above.
    val source = cls.newInstance().asInstanceOf[DataSourceV2]
    source match {
      case ws: WriteSupport =>
        // Session configs are merged with writer options; writer options win.
        val sessionOptions = DataSourceV2Utils.extractSessionConfigs(
          source,
          df.sparkSession.sessionState.conf)
        val options = sessionOptions ++ extraOptions
        // The source may decline the write by returning an empty Optional.
        val writer = ws.createWriter(
          UUID.randomUUID.toString, df.logicalPlan.output.toStructType, mode,
          new DataSourceOptions(options.asJava))
        if
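
// Usage sketch (an illustrative assumption, not part of this file): how user
// code reaches the WriteSupport branch above. "com.example.V2Sink" is a
// hypothetical DataSourceV2 provider implementing WriteSupport, and the
// "path" option is just an example of an entry that lands in extraOptions.
//
//   val df = spark.range(10).toDF("id")
//   df.write
//     .format("com.example.V2Sink")  // resolved by DataSource.lookupDataSource
//     .option("path", "/tmp/out")    // merged after sessionOptions, so it wins
//     .mode(SaveMode.Append)         // forwarded to ws.createWriter as `mode`
//     .save()                        // enters the method above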